diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects.py b/packages/models-library/src/models_library/api_schemas_webserver/projects.py index 39bdfe4be26..6ef501c2311 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects.py @@ -278,6 +278,7 @@ class ProjectPatch(InputSchema): ] = None quality: dict[str, Any] | None = None template_type: ProjectTemplateType | None = None + hidden: bool | None = None def to_domain_model(self) -> dict[str, Any]: return self.model_dump(exclude_unset=True, by_alias=False) diff --git a/packages/models-library/src/models_library/rpc/webserver/projects.py b/packages/models-library/src/models_library/rpc/webserver/projects.py index d1bfff34213..e136b1fd99d 100644 --- a/packages/models-library/src/models_library/rpc/webserver/projects.py +++ b/packages/models-library/src/models_library/rpc/webserver/projects.py @@ -90,6 +90,7 @@ class ProjectJobRpcGet(BaseModel): # Specific to jobs job_parent_resource_name: str + storage_assets_deleted: bool @staticmethod def _update_json_schema_extra(schema: JsonDict) -> None: @@ -105,6 +106,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "created_at": "2023-01-01T00:00:00Z", "modified_at": "2023-01-01T00:00:00Z", "job_parent_resource_name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.2", + "storage_assets_deleted": "false", }, { "uuid": "00000000-1234-5678-1234-123456789012", @@ -114,6 +116,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "created_at": "2023-02-01T00:00:00Z", "modified_at": "2023-02-01T00:00:00Z", "job_parent_resource_name": "studies/96642f2a-a72c-11ef-8776-02420a00087d", + "storage_assets_deleted": "true", }, { "uuid": "00000000-0000-5678-1234-123456789012", @@ -123,6 +126,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "created_at": "2023-03-01T00:00:00Z", "modified_at": "2023-03-01T00:00:00Z", "job_parent_resource_name": "program/simcore%2Fservices%2Fdynamic%2Fjupyter/releases/5.0.2", + "storage_assets_deleted": "false", }, ] } diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/5b998370916a_introduce_data_deleted_in_projects_to_.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5b998370916a_introduce_data_deleted_in_projects_to_.py new file mode 100644 index 00000000000..95457ee0c4c --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5b998370916a_introduce_data_deleted_in_projects_to_.py @@ -0,0 +1,40 @@ +"""introduce data_deleted in projects_to_jobs table + +Revision ID: 5b998370916a +Revises: 5679165336c8 +Create Date: 2025-08-11 13:58:38.424398+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "5b998370916a" +down_revision = "5679165336c8" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "projects_to_jobs", + sa.Column("storage_assets_deleted", sa.Boolean(), nullable=True), + ) + + op.execute("UPDATE projects_to_jobs SET storage_assets_deleted = false") + + op.alter_column( + "projects_to_jobs", + "storage_assets_deleted", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column("projects_to_jobs", "storage_assets_deleted") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects_to_jobs.py b/packages/postgres-database/src/simcore_postgres_database/models/projects_to_jobs.py index 4f3859fb36e..4013cf9e435 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/projects_to_jobs.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/projects_to_jobs.py @@ -28,6 +28,12 @@ "the relative resource name is shelves/shelf1/jobs/job2, " "the parent resource name is shelves/shelf1.", ), + sa.Column( + "storage_assets_deleted", + sa.Boolean, + nullable=False, + doc="Indicates whether the job's S3 assets have been actively deleted.", + ), # Composite key (project_uuid, job_parent_resource_name) uniquely identifies very row sa.UniqueConstraint( "project_uuid", diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_rpc_server.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_rpc_server.py index 17d8051d096..3cedcff8aba 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_rpc_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_rpc_server.py @@ -26,6 +26,14 @@ class WebserverRpcSideEffects: # pylint: disable=no-self-use + def __init__( + self, + project_job_rpc_get: ProjectJobRpcGet = ProjectJobRpcGet.model_validate( + ProjectJobRpcGet.model_json_schema()["examples"][0] + ), + ): + self.project_job_rpc_get = project_job_rpc_get + @validate_call(config={"arbitrary_types_allowed": True}) async def mark_project_as_job( self, @@ -35,12 +43,14 @@ async def mark_project_as_job( user_id: UserID, project_uuid: ProjectID, job_parent_resource_name: str, + storage_assets_deleted: bool, ) -> None: assert rpc_client assert not job_parent_resource_name.startswith("/") # nosec assert "/" in job_parent_resource_name # nosec assert not job_parent_resource_name.endswith("/") # nosec + assert isinstance(storage_assets_deleted, bool) assert product_name assert user_id @@ -84,3 +94,25 @@ async def list_projects_marked_as_jobs( limit=limit, offset=offset, ) + + @validate_call(config={"arbitrary_types_allowed": True}) + async def get_project_marked_as_job( + self, + rpc_client: RabbitMQRPCClient | MockType, + *, + product_name: ProductName, + user_id: UserID, + project_uuid: ProjectID, + job_parent_resource_name: str, + ) -> ProjectJobRpcGet: + assert rpc_client + assert product_name + assert user_id + assert project_uuid + assert job_parent_resource_name + + # Return a valid example from the schema + _data = self.project_job_rpc_get.model_dump() + _data["uuid"] = str(project_uuid) + _data["job_parent_resource_name"] = job_parent_resource_name + return ProjectJobRpcGet.model_validate(_data) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/projects.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/projects.py index 1f01f453036..eabc7a18d7d 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/projects.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/projects.py @@ -9,6 +9,7 @@ from models_library.rpc.webserver.projects import ( ListProjectsMarkedAsJobRpcFilters, PageRpcProjectJobRpcGet, + ProjectJobRpcGet, ) from models_library.rpc_pagination import ( DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, @@ -32,6 +33,7 @@ async def mark_project_as_job( user_id: UserID, project_uuid: ProjectID, 
job_parent_resource_name: str, + storage_assets_deleted: bool, ) -> None: result = await rpc_client.request( @@ -41,6 +43,7 @@ async def mark_project_as_job( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=storage_assets_deleted, ) assert result is None @@ -68,3 +71,25 @@ async def list_projects_marked_as_jobs( ) assert TypeAdapter(PageRpcProjectJobRpcGet).validate_python(result) # nosec return cast(PageRpcProjectJobRpcGet, result) + + +@log_decorator(_logger, level=logging.DEBUG) +@validate_call(config={"arbitrary_types_allowed": True}) +async def get_project_marked_as_job( + rpc_client: RabbitMQRPCClient, + *, + product_name: ProductName, + user_id: UserID, + project_uuid: ProjectID, + job_parent_resource_name: str, +) -> ProjectJobRpcGet: + result = await rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("get_project_marked_as_job"), + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + assert TypeAdapter(ProjectJobRpcGet).validate_python(result) # nosec + return cast(ProjectJobRpcGet, result) diff --git a/services/api-server/VERSION b/services/api-server/VERSION index d9df1bbc0c7..ac454c6a1fc 100644 --- a/services/api-server/VERSION +++ b/services/api-server/VERSION @@ -1 +1 @@ -0.11.0 +0.12.0 diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json index 0f0e7029858..cc30044fded 100644 --- a/services/api-server/openapi.json +++ b/services/api-server/openapi.json @@ -3,7 +3,7 @@ "info": { "title": "osparc.io public API", "description": "osparc-simcore public API specifications", - "version": "0.11.0" + "version": "0.12.0" }, "paths": { "/v0/meta": { @@ -2761,6 +2761,138 @@ } } }, + "/v0/solvers/{solver_key}/releases/{version}/jobs/{job_id}/assets": { + "delete": { + "tags": [ + "solvers" + ], + "summary": "Delete Job Assets", + "description": "Deletes assets associated with an existing solver job. N.B. 
this renders the solver job un-startable\n\nNew in *version 0.12*", + "operationId": "delete_job_assets", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "solver_key", + "in": "path", + "required": true, + "schema": { + "type": "string", + "pattern": "^simcore/services/comp/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", + "title": "Solver Key" + } + }, + { + "name": "version", + "in": "path", + "required": true, + "schema": { + "type": "string", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "title": "Version" + } + }, + { + "name": "job_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Job Id" + } + } + ], + "responses": { + "204": { + "description": "Successful Response" + }, + "402": { + "description": "Payment required", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "404": { + "description": "Job/wallet/pricing details not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "502": { + "description": "Unexpected error when communicating with backend service", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "503": { + "description": "Service unavailable", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "504": { + "description": "Request to a backend service timed out.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, "/v0/solvers/{solver_key}/releases/{version}/jobs/{job_id}:start": { "post": { "tags": [ @@ -2926,6 +3058,16 @@ } } }, + "409": { + "description": "Job assets missing", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, "422": { "description": "Configuration error", "content": { @@ -3760,6 +3902,16 @@ } } }, + "409": { + "description": "Job assets missing", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, "422": { "description": "Validation Error", "content": { diff --git a/services/api-server/setup.cfg b/services/api-server/setup.cfg index 6e099f77c21..b6c8e79440d 100644 --- a/services/api-server/setup.cfg +++ b/services/api-server/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.11.0 +current_version = 0.12.0 commit = True message = services/api-server version: {current_version} → {new_version} tag = False diff --git a/services/api-server/src/simcore_service_api_server/_service_jobs.py b/services/api-server/src/simcore_service_api_server/_service_jobs.py index a92ccb41fa0..67aa1171433 100644 --- a/services/api-server/src/simcore_service_api_server/_service_jobs.py +++ 
b/services/api-server/src/simcore_service_api_server/_service_jobs.py @@ -5,7 +5,11 @@ from common_library.exclude import as_dict_exclude_none from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobGet -from models_library.api_schemas_webserver.projects import ProjectCreateNew, ProjectGet +from models_library.api_schemas_webserver.projects import ( + ProjectCreateNew, + ProjectGet, + ProjectPatch, +) from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -13,11 +17,13 @@ PageMetaInfoLimitOffset, PageOffsetInt, ) +from models_library.rpc.webserver.projects import ProjectJobRpcGet from models_library.rpc_pagination import PageLimitInt from models_library.users import UserID from pydantic import HttpUrl from servicelib.logging_utils import log_context +from .models.api_resources import RelativeResourceName from .models.basic_types import NameValueTuple from .models.schemas.jobs import Job, JobID, JobInputs from .models.schemas.programs import Program @@ -27,6 +33,7 @@ create_job_inputs_from_node_inputs, create_new_project_for_job, ) +from .services_http.storage import StorageApi from .services_http.webserver import AuthSession from .services_rpc.director_v2 import DirectorV2Service from .services_rpc.storage import StorageService @@ -40,6 +47,7 @@ class JobService: _web_rest_client: AuthSession _web_rpc_client: WbApiRpcClient _storage_rpc_client: StorageService + _storage_rest_client: StorageApi _directorv2_rpc_client: DirectorV2Service user_id: UserID product_name: ProductName @@ -137,6 +145,7 @@ async def create_job( user_id=self.user_id, project_uuid=new_project.uuid, job_parent_resource_name=pre_job.runner_name, + storage_assets_deleted=False, ) assert new_project # nosec @@ -167,3 +176,32 @@ async def start_log_export( ], ) return async_job_get + + async def get_job( + self, job_parent_resource_name: RelativeResourceName, job_id: JobID + ) -> ProjectJobRpcGet: + """This method can be used to check that the project exists and has the correct parent resource.""" + return await self._web_rpc_client.get_project_marked_as_job( + product_name=self.product_name, + user_id=self.user_id, + project_id=job_id, + job_parent_resource_name=job_parent_resource_name, + ) + + async def delete_job_assets( + self, job_parent_resource_name: RelativeResourceName, job_id: JobID + ): + """Marks the job's project as hidden and deletes the S3 assets associated with it""" + await self._web_rest_client.patch_project( + project_id=job_id, patch_params=ProjectPatch(hidden=True) + ) + await self._storage_rest_client.delete_project_s3_assets( + user_id=self.user_id, project_id=job_id + ) + await self._web_rpc_client.mark_project_as_job( + product_name=self.product_name, + user_id=self.user_id, + project_uuid=job_id, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=True, + ) diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/services.py b/services/api-server/src/simcore_service_api_server/api/dependencies/services.py index 565ec16b0e6..fe5c30ceb16 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/services.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/services.py @@ -14,6 +14,7 @@ from ..._service_programs import ProgramService from ..._service_solvers import SolverService from ..._service_studies import StudyService +from ...services_http.storage import StorageApi from ...services_http.webserver import 
AuthSession from ...services_rpc.catalog import CatalogService from ...services_rpc.director_v2 import DirectorV2Service @@ -86,7 +87,8 @@ def get_directorv2_service( def get_job_service( web_rest_api: Annotated[AuthSession, Depends(get_webserver_session)], web_rpc_api: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], - storage_service: Annotated[StorageService, Depends(get_storage_service)], + storage_rpc_client: Annotated[StorageService, Depends(get_storage_service)], + storage_rest_client: Annotated[StorageApi, Depends(get_api_client(StorageApi))], directorv2_service: Annotated[DirectorV2Service, Depends(get_directorv2_service)], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], @@ -98,7 +100,8 @@ def get_job_service( return JobService( _web_rest_client=web_rest_api, _web_rpc_client=web_rpc_api, - _storage_rpc_client=storage_service, + _storage_rpc_client=storage_rpc_client, + _storage_rest_client=storage_rest_client, _directorv2_rpc_client=directorv2_service, user_id=user_id, product_name=product_name, diff --git a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py index 9a34c4dd1fa..170dfe52614 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py @@ -294,6 +294,7 @@ async def function_job_outputs( user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], storage_client: Annotated[StorageApi, Depends(get_api_client(StorageApi))], + job_service: Annotated[JobService, Depends(get_job_service)], wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], async_pg_engine: Annotated[AsyncEngine, Depends(get_db_asyncpg_engine)], stored_job_outputs: Annotated[FunctionOutputs, Depends(get_stored_job_outputs)], @@ -329,6 +330,7 @@ async def function_job_outputs( user_id=user_id, webserver_api=webserver_api, storage_client=storage_client, + job_service=job_service, async_pg_engine=async_pg_engine, ) ).results diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index c1fafbae4df..f3799a97c5b 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -509,6 +509,7 @@ async def run_function( # noqa: PLR0913 user_id=user_id, webserver_api=webserver_api, director2_api=director2_api, + job_service=job_service, ) return await register_function_job( wb_api_rpc=wb_api_rpc, diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py index 83b5d83f5f0..7e8bd40eb1c 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py @@ -4,7 +4,7 @@ from collections.abc import Callable from typing import Annotated, Any -from fastapi import APIRouter, Depends, Header, Query, Request, status +from fastapi import APIRouter, Depends, Header, HTTPException, Query, Request, status from fastapi.encoders import jsonable_encoder from 
fastapi.responses import JSONResponse from models_library.clusters import ClusterID @@ -151,6 +151,36 @@ async def delete_job( await webserver_api.delete_project(project_id=job_id) +@router.delete( + "/{solver_key:path}/releases/{version}/jobs/{job_id:uuid}/assets", + status_code=status.HTTP_204_NO_CONTENT, + responses=JOBS_STATUS_CODES, + description=create_route_description( + base="Deletes assets associated with an existing solver job. N.B. this renders the solver job un-startable", + changelog=[ + FMSG_CHANGELOG_NEW_IN_VERSION.format("0.12"), + ], + ), +) +async def delete_job_assets( + solver_key: SolverKeyId, + version: VersionStr, + job_id: JobID, + job_service: Annotated[JobService, Depends(get_job_service)], +): + job_parent_resource_name = Solver.compose_resource_name(solver_key, version) + + # check that job exists and is accessible to user + project_job_rpc_get = await job_service.get_job( + job_parent_resource_name=job_parent_resource_name, job_id=job_id + ) + assert project_job_rpc_get.uuid == job_id # nosec + + await job_service.delete_job_assets( + job_parent_resource_name=job_parent_resource_name, job_id=job_id + ) + + @router.post( "/{solver_key:path}/releases/{version}/jobs/{job_id:uuid}:start", status_code=status.HTTP_202_ACCEPTED, @@ -165,6 +195,10 @@ async def delete_job( "description": "Cluster not found", "model": ErrorGet, }, + status.HTTP_409_CONFLICT: { + "description": "Job assets missing", + "model": ErrorGet, + }, status.HTTP_422_UNPROCESSABLE_ENTITY: { "description": "Configuration error", "model": ErrorGet, @@ -193,6 +227,7 @@ async def start_job( user_id: Annotated[PositiveInt, Depends(get_current_user_id)], director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + job_service: Annotated[JobService, Depends(get_job_service)], cluster_id: Annotated[ # pylint: disable=unused-argument # noqa: ARG001 ClusterID | None, Query(deprecated=True) ] = None, @@ -200,6 +235,16 @@ async def start_job( job_name = compose_job_resource_name(solver_key, version, job_id) _logger.debug("Start Job '%s'", job_name) + job_parent_resource_name = Solver.compose_resource_name(solver_key, version) + job = await job_service.get_job( + job_id=job_id, job_parent_resource_name=job_parent_resource_name + ) + if job.storage_assets_deleted: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail=f"Assets for job job_id={job_id} are missing", + ) + try: await start_project( request=request, diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_read.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_read.py index f46ea4d9892..30136b408e7 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_read.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_read.py @@ -21,6 +21,7 @@ from sqlalchemy.ext.asyncio import AsyncEngine from starlette.background import BackgroundTask +from ..._service_jobs import JobService from ..._service_solvers import SolverService from ...exceptions.custom_errors import InsufficientCreditsError, MissingWalletError from ...exceptions.service_errors_utils import DEFAULT_BACKEND_SERVICE_STATUS_CODES @@ -43,7 +44,7 @@ PricingUnitGetLegacy, WalletGetWithAvailableCreditsLegacy, ) -from ...models.schemas.solvers import SolverKeyId +from ...models.schemas.solvers import Solver, SolverKeyId from ...services_http.director_v2 import DirectorV2Api 
from ...services_http.jobs import ( get_custom_metadata, @@ -58,7 +59,7 @@ from ..dependencies.database import get_db_asyncpg_engine from ..dependencies.models_schemas_jobs_filters import get_job_metadata_filter from ..dependencies.rabbitmq import get_log_check_timeout, get_log_distributor -from ..dependencies.services import get_api_client, get_solver_service +from ..dependencies.services import get_api_client, get_job_service, get_solver_service from ..dependencies.webserver_http import AuthSession, get_webserver_session from ._constants import ( FMSG_CHANGELOG_NEW_IN_VERSION, @@ -294,7 +295,13 @@ async def get_job( @router.get( "/{solver_key:path}/releases/{version}/jobs/{job_id:uuid}/outputs", response_model=JobOutputs, - responses=_OUTPUTS_STATUS_CODES, + responses=_OUTPUTS_STATUS_CODES + | { + status.HTTP_409_CONFLICT: { + "description": "Job assets missing", + "model": ErrorGet, + }, + }, ) async def get_job_outputs( solver_key: SolverKeyId, @@ -303,20 +310,34 @@ async def get_job_outputs( user_id: Annotated[PositiveInt, Depends(get_current_user_id)], async_pg_engine: Annotated[AsyncEngine, Depends(get_db_asyncpg_engine)], webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + job_service: Annotated[JobService, Depends(get_job_service)], storage_client: Annotated[StorageApi, Depends(get_api_client(StorageApi))], ): job_name = compose_job_resource_name(solver_key, version, job_id) _logger.debug("Get Job '%s' outputs", job_name) - project: ProjectGet = await webserver_api.get_project(project_id=job_id) - node_ids = list(project.workbench.keys()) + project_marked_as_job = await job_service.get_job( + job_id=job_id, + job_parent_resource_name=Solver.compose_resource_name( + key=solver_key, version=version + ), + ) + node_ids = list(project_marked_as_job.workbench.keys()) assert len(node_ids) == 1 # nosec + if project_marked_as_job.storage_assets_deleted: + _logger.warning("Storage data for job '%s' has been deleted", job_name) + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, detail="Assets have been deleted" + ) + product_price = await webserver_api.get_product_price() if product_price.usd_per_credit is not None: - wallet = await webserver_api.get_project_wallet(project_id=project.uuid) + wallet = await webserver_api.get_project_wallet( + project_id=project_marked_as_job.uuid + ) if wallet is None: - raise MissingWalletError(job_id=project.uuid) + raise MissingWalletError(job_id=project_marked_as_job.uuid) wallet_with_credits = await webserver_api.get_wallet(wallet_id=wallet.wallet_id) if wallet_with_credits.available_credits <= ZERO_CREDITS: raise InsufficientCreditsError( diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py index 3a298efe380..8fd7e224eb8 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py @@ -164,6 +164,7 @@ async def create_study_job( user_id=user_id, project_uuid=job.id, job_parent_resource_name=job.runner_name, + storage_assets_deleted=False, ) project_inputs = await webserver_api.get_project_inputs(project_id=project.uuid) diff --git a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py index f87978cd0dd..db7c88f6ca0 100644 --- 
a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py @@ -57,6 +57,11 @@ class ServiceForbiddenAccessError(BaseBackEndError): status_code = status.HTTP_403_FORBIDDEN +class JobForbiddenAccessError(BaseBackEndError): + msg_template = "Forbidden access to job {project_id}" + status_code = status.HTTP_403_FORBIDDEN + + class JobNotFoundError(BaseBackEndError): msg_template = "Could not get solver/study job {project_id}" status_code = status.HTTP_404_NOT_FOUND diff --git a/services/api-server/src/simcore_service_api_server/services_http/storage.py b/services/api-server/src/simcore_service_api_server/services_http/storage.py index 3f4899a0121..fc0cf744f96 100644 --- a/services/api-server/src/simcore_service_api_server/services_http/storage.py +++ b/services/api-server/src/simcore_service_api_server/services_http/storage.py @@ -29,7 +29,9 @@ ) from models_library.basic_types import SHA256Str from models_library.generics import Envelope +from models_library.projects import ProjectID from models_library.rest_pagination import PageLimitInt, PageOffsetInt +from models_library.users import UserID from pydantic import AnyUrl from settings_library.tracing import TracingSettings from simcore_service_api_server.exceptions.backend_errors import BackendTimeoutError @@ -284,6 +286,16 @@ async def create_soft_link( file_meta: File = to_file_api_model(stored_file_meta) return file_meta + @_exception_mapper(http_status_map={}) + async def delete_project_s3_assets( + self, user_id: UserID, project_id: ProjectID + ) -> None: + response = await self.client.delete( + f"/simcore-s3/folders/{project_id}", + params={"user_id": user_id}, + ) + response.raise_for_status() + # MODULES APP SETUP ------------------------------------------------------------- diff --git a/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py b/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py index 0ffd49be7c1..4fde8b5403c 100644 --- a/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py +++ b/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py @@ -44,6 +44,7 @@ from models_library.rpc.webserver.projects import ( ListProjectsMarkedAsJobRpcFilters, MetadataFilterItem, + ProjectJobRpcGet, ) from models_library.services_types import ServiceRunID from models_library.users import UserID @@ -63,6 +64,10 @@ NotEnoughAvailableSeatsError, ) from servicelib.rabbitmq.rpc_interfaces.webserver import projects as projects_rpc +from servicelib.rabbitmq.rpc_interfaces.webserver.errors import ( + ProjectForbiddenRpcError, + ProjectNotFoundRpcError, +) from servicelib.rabbitmq.rpc_interfaces.webserver.functions import ( functions_rpc_interface, ) @@ -83,6 +88,8 @@ from ..exceptions.backend_errors import ( CanNotCheckoutServiceIsNotRunningError, InsufficientNumberOfSeatsError, + JobForbiddenAccessError, + JobNotFoundError, LicensedItemCheckoutNotFoundError, ) from ..exceptions.service_errors_utils import service_exception_mapper @@ -242,6 +249,7 @@ async def mark_project_as_job( user_id: UserID, project_uuid: ProjectID, job_parent_resource_name: RelativeResourceName, + storage_assets_deleted: bool, ): await projects_rpc.mark_project_as_job( rpc_client=self._client, @@ -249,6 +257,29 @@ async def mark_project_as_job( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name, + 
storage_assets_deleted=storage_assets_deleted, + ) + + @_exception_mapper( + rpc_exception_map={ + ProjectForbiddenRpcError: JobForbiddenAccessError, + ProjectNotFoundRpcError: JobNotFoundError, + } + ) + async def get_project_marked_as_job( + self, + *, + product_name: ProductName, + user_id: UserID, + project_id: ProjectID, + job_parent_resource_name: RelativeResourceName, + ) -> ProjectJobRpcGet: + return await projects_rpc.get_project_marked_as_job( + rpc_client=self._client, + product_name=product_name, + user_id=user_id, + project_uuid=project_id, + job_parent_resource_name=job_parent_resource_name, ) async def list_projects_marked_as_jobs( diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py index a6aee073d27..a217ee1b6d5 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py @@ -227,6 +227,7 @@ async def test_get_function_job_outputs( mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], mock_registered_project_function_job: RegisteredProjectFunctionJob, mock_registered_project_function: RegisteredProjectFunction, + mocked_webserver_rpc_api: dict[str, MockType], auth: httpx.BasicAuth, job_outputs: dict[str, Any], ) -> None: diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py index 899a18553dc..edb8531a289 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py @@ -227,3 +227,64 @@ def create_project_side_effect(request: httpx.Request): ) assert resp.status_code == status.HTTP_201_CREATED job = Job.model_validate(resp.json()) + + +@pytest.fixture +def mocked_backend_services_apis_for_delete_job_assets( + mocked_webserver_rest_api: MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], + mocked_storage_rest_api_base: MockRouter, +) -> dict[str, MockRouter | dict[str, MockType]]: + + # Patch PATCH /projects/{project_id} + def _patch_project(request: httpx.Request, **kwargs): + # Accept any patch, return 204 No Content + return httpx.Response(status_code=status.HTTP_204_NO_CONTENT) + + mocked_webserver_rest_api.patch( + path__regex=r"/projects/(?P<project_id>[\w-]+)$", + name="patch_project", + ).mock(side_effect=_patch_project) + + # Mock storage REST delete_project_s3_assets + def _delete_project_s3_assets(request: httpx.Request, **kwargs): + return httpx.Response(status_code=status.HTTP_204_NO_CONTENT) + + mocked_storage_rest_api_base.delete( + path__regex=r"/simcore-s3/folders/(?P<project_id>[\w-]+)$", + name="delete_project_s3_assets", + ).mock(side_effect=_delete_project_s3_assets) + + return { + "webserver_rest": mocked_webserver_rest_api, + "webserver_rpc": mocked_webserver_rpc_api, + "storage_rest": mocked_storage_rest_api_base, + } + + +@pytest.mark.acceptance_test("Test delete_job_assets endpoint") +async def test_delete_job_assets_endpoint( + auth: httpx.BasicAuth, + client: httpx.AsyncClient, + solver_key: str, + solver_version: str, + mocked_backend_services_apis_for_delete_job_assets: dict[ + str, MockRouter | dict[str, MockType] + ], +): + job_id = "123e4567-e89b-12d3-a456-426614174000" + url = f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job_id}/assets" + + 
resp = await client.delete(url, auth=auth) + assert resp.status_code == status.HTTP_204_NO_CONTENT + + webserver_rest = mocked_backend_services_apis_for_delete_job_assets[ + "webserver_rest" + ] + assert webserver_rest["patch_project"].called + + storage_rest = mocked_backend_services_apis_for_delete_job_assets["storage_rest"] + assert storage_rest["delete_project_s3_assets"].called + + webserver_rpc = mocked_backend_services_apis_for_delete_job_assets["webserver_rpc"] + assert webserver_rpc["mark_project_as_job"].called diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index 182d4509965..51370bb7ce1 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -42,6 +42,7 @@ from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import BaseFileLink, SimcoreS3FileID +from models_library.rpc.webserver.projects import ProjectJobRpcGet from models_library.users import UserID from moto.server import ThreadedMotoServer from packaging.version import Version @@ -543,9 +544,17 @@ def mocked_catalog_rest_api_base( yield respx_mock +@pytest.fixture +def project_job_rpc_get() -> ProjectJobRpcGet: + example = ProjectJobRpcGet.model_json_schema()["examples"][0] + return ProjectJobRpcGet.model_validate(example) + + @pytest.fixture def mocked_webserver_rpc_api( - mocked_app_dependencies: None, mocker: MockerFixture + mocked_app_dependencies: None, + mocker: MockerFixture, + project_job_rpc_get: ProjectJobRpcGet, ) -> dict[str, MockType]: """ Mocks the webserver's simcore service RPC API for testing purposes. @@ -554,7 +563,7 @@ def mocked_webserver_rpc_api( projects as projects_rpc, # keep import here ) - side_effects = WebserverRpcSideEffects() + side_effects = WebserverRpcSideEffects(project_job_rpc_get=project_job_rpc_get) return { "mark_project_as_job": mocker.patch.object( @@ -563,6 +572,12 @@ def mocked_webserver_rpc_api( autospec=True, side_effect=side_effects.mark_project_as_job, ), + "get_project_marked_as_job": mocker.patch.object( + projects_rpc, + "get_project_marked_as_job", + autospec=True, + side_effect=side_effects.get_project_marked_as_job, + ), "list_projects_marked_as_jobs": mocker.patch.object( projects_rpc, "list_projects_marked_as_jobs", diff --git a/services/api-server/tests/unit/service/conftest.py b/services/api-server/tests/unit/service/conftest.py index 10fc78b4474..0dc632def76 100644 --- a/services/api-server/tests/unit/service/conftest.py +++ b/services/api-server/tests/unit/service/conftest.py @@ -20,6 +20,7 @@ from simcore_service_api_server._service_programs import ProgramService from simcore_service_api_server._service_solvers import SolverService from simcore_service_api_server._service_studies import StudyService +from simcore_service_api_server.services_http.storage import StorageApi from simcore_service_api_server.services_http.webserver import AuthSession from simcore_service_api_server.services_rpc.catalog import CatalogService from simcore_service_api_server.services_rpc.director_v2 import DirectorV2Service @@ -108,11 +109,20 @@ async def _create_project(project: ProjectCreateNew, **kwargs): return mock +@pytest.fixture +def storage_rest_client( + mocker: MockerFixture, +) -> StorageApi: + mock = mocker.AsyncMock(spec=StorageApi) + return mock + + @pytest.fixture def job_service( auth_session: AuthSession, director_v2_rpc_client: DirectorV2Service, storage_rpc_client: StorageService, + 
storage_rest_client: StorageApi, wb_api_rpc_client: WbApiRpcClient, product_name: ProductName, user_id: UserID, @@ -121,6 +131,7 @@ def job_service( _web_rest_client=auth_session, _web_rpc_client=wb_api_rpc_client, _storage_rpc_client=storage_rpc_client, + _storage_rest_client=storage_rest_client, _directorv2_rpc_client=director_v2_rpc_client, user_id=user_id, product_name=product_name, diff --git a/services/api-server/tests/unit/service/test_service_solvers.py b/services/api-server/tests/unit/service/test_service_solvers.py index a32d3b82f6f..7c912e1657d 100644 --- a/services/api-server/tests/unit/service/test_service_solvers.py +++ b/services/api-server/tests/unit/service/test_service_solvers.py @@ -5,15 +5,25 @@ import pytest from models_library.products import ProductName +from models_library.projects import ProjectID +from models_library.rpc.webserver.projects import ProjectJobRpcGet from models_library.users import UserID from pytest_mock import MockType from simcore_service_api_server._service_jobs import JobService from simcore_service_api_server._service_solvers import SolverService +from simcore_service_api_server.exceptions.backend_errors import ( + JobForbiddenAccessError, + JobNotFoundError, +) from simcore_service_api_server.exceptions.custom_errors import ( ServiceConfigurationError, ) from simcore_service_api_server.models.schemas.solvers import Solver from simcore_service_api_server.services_rpc.catalog import CatalogService +from simcore_service_api_server.services_rpc.wb_api_server import ( + ProjectForbiddenRpcError, + ProjectNotFoundRpcError, +) async def test_get_solver( @@ -73,3 +83,51 @@ async def test_solver_service_init_raises_configuration_error( ) # Verify the RPC call was made to check consistency assert not mocked_rpc_client.request.called + + +async def test_job_service_get_job_success( + mocked_rpc_client: MockType, + job_service: JobService, +): + job_parent_resource_name = "solver-resource" + job_id = ProjectID("123e4567-e89b-12d3-a456-426614174000") + + # Act + result = await job_service.get_job(job_parent_resource_name, job_id) + + # Assert + assert isinstance(result, ProjectJobRpcGet) + assert result.job_parent_resource_name.startswith(job_parent_resource_name) + assert mocked_rpc_client.request.called + assert mocked_rpc_client.request.call_args.args == ( + "webserver", + "get_project_marked_as_job", + ) + assert ( + mocked_rpc_client.request.call_args.kwargs["job_parent_resource_name"] + == job_parent_resource_name + ) + assert mocked_rpc_client.request.call_args.kwargs["project_uuid"] == job_id + + +@pytest.mark.parametrize( + "client_exception_type,api_exception_type", + [ + (ProjectForbiddenRpcError, JobForbiddenAccessError), + (ProjectNotFoundRpcError, JobNotFoundError), + ], +) +async def test_job_service_get_job_exceptions( + mocker, + job_service: JobService, + client_exception_type: type[Exception], + api_exception_type: type[Exception], +): + job_parent_resource_name = "solver-resource" + job_id = ProjectID("123e4567-e89b-12d3-a456-426614174000") + # Patch the actual RPC interface method + patch_path = "servicelib.rabbitmq.rpc_interfaces.webserver.projects.get_project_marked_as_job" + mocker.patch(patch_path, side_effect=client_exception_type()) + + with pytest.raises(api_exception_type): + await job_service.get_job(job_parent_resource_name, job_id) diff --git a/services/api-server/tests/unit/test_api_solver_jobs.py b/services/api-server/tests/unit/test_api_solver_jobs.py index 518d183603d..33363cb4e33 100644 --- 
a/services/api-server/tests/unit/test_api_solver_jobs.py +++ b/services/api-server/tests/unit/test_api_solver_jobs.py @@ -3,10 +3,11 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments +from datetime import datetime from decimal import Decimal from pathlib import Path from typing import Any, Final -from uuid import UUID +from uuid import UUID, uuid4 import httpx import pytest @@ -15,7 +16,10 @@ from fastapi.encoders import jsonable_encoder from httpx import AsyncClient from models_library.generics import Envelope +from models_library.projects_nodes import Node +from models_library.rpc.webserver.projects import ProjectJobRpcGet from pydantic import TypeAdapter +from pytest_mock import MockType from pytest_simcore.helpers.httpx_calls_capture_models import ( CreateRespxMockCallback, HttpApiCallCaptureModel, @@ -205,6 +209,7 @@ async def test_start_solver_job_pricing_unit_with_payment( client: AsyncClient, mocked_webserver_rest_api_base: MockRouter, mocked_directorv2_rest_api_base: MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], create_respx_mock_from_capture: CreateRespxMockCallback, auth: httpx.BasicAuth, project_tests_dir: Path, @@ -280,6 +285,7 @@ async def test_get_solver_job_pricing_unit_no_payment( client: AsyncClient, mocked_webserver_rest_api_base: MockRouter, mocked_directorv2_rest_api_base: MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], create_respx_mock_from_capture: CreateRespxMockCallback, auth: httpx.BasicAuth, project_tests_dir: Path, @@ -313,6 +319,7 @@ async def test_start_solver_job_conflict( client: AsyncClient, mocked_webserver_rest_api_base: MockRouter, mocked_directorv2_rest_api_base: MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], create_respx_mock_from_capture: CreateRespxMockCallback, auth: httpx.BasicAuth, project_tests_dir: Path, @@ -343,6 +350,57 @@ async def test_start_solver_job_conflict( assert f"{job_status.job_id}" == _job_id +@pytest.mark.parametrize( + "project_job_rpc_get", + [ + pytest.param( + ProjectJobRpcGet( + uuid=UUID("00000000-1234-5678-1234-123456789012"), + name="A study job", + description="A description of a study job with many node", + workbench={}, + created_at=datetime.fromisoformat("2023-02-01T00:00:00Z"), + modified_at=datetime.fromisoformat("2023-02-01T00:00:00Z"), + job_parent_resource_name="studies/96642f2a-a72c-11ef-8776-02420a00087d", + storage_assets_deleted=True, + ), + id="storage_assets_deleted", + ) + ], +) +async def test_start_solver_job_storage_data_missing( + client: AsyncClient, + mocked_webserver_rest_api_base: MockRouter, + mocked_directorv2_rest_api_base: MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], + create_respx_mock_from_capture: CreateRespxMockCallback, + auth: httpx.BasicAuth, + project_tests_dir: Path, +): + _solver_key: str = "simcore/services/comp/itis/sleeper" + _version: str = "2.0.2" + _job_id: str = "b9faf8d8-4928-4e50-af40-3690712c5481" + + create_respx_mock_from_capture( + respx_mocks=[ + mocked_directorv2_rest_api_base, + mocked_webserver_rest_api_base, + ], + capture_path=project_tests_dir / "mocks" / "start_solver_job.json", + side_effects_callbacks=[ + _start_job_side_effect, + _get_inspect_job_side_effect(job_id=_job_id), + ], + ) + + response = await client.post( + f"{API_VTAG}/solvers/{_solver_key}/releases/{_version}/jobs/{_job_id}:start", + auth=auth, + ) + + assert response.status_code == status.HTTP_409_CONFLICT + + async def test_stop_job( client: AsyncClient, mocked_directorv2_rest_api_base: MockRouter, @@ -393,6 +451,7 
@@ async def test_get_solver_job_outputs( mocked_webserver_rest_api_base: MockRouter, mocked_storage_rest_api_base: MockRouter, mocked_solver_job_outputs: None, + mocked_webserver_rpc_api: dict[str, MockType], create_respx_mock_from_capture: CreateRespxMockCallback, auth: httpx.BasicAuth, project_tests_dir: Path, @@ -442,3 +501,59 @@ def _wallet_side_effect( ) assert response.status_code == expected_status_code + + +@pytest.mark.parametrize( + "project_job_rpc_get", + [ + ProjectJobRpcGet( + uuid=UUID("12345678-1234-5678-1234-123456789012"), + name="A solver job", + description="A description of a solver job with a single node", + workbench={ + f"{uuid4()}": Node.model_validate( + Node.model_json_schema()["examples"][0] + ) + }, + created_at=datetime.fromisoformat("2023-01-01T00:00:00Z"), + modified_at=datetime.fromisoformat("2023-01-01T00:00:00Z"), + job_parent_resource_name="solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.2", + storage_assets_deleted=True, + ) + ], +) +async def test_get_solver_job_outputs_assets_deleted( + client: AsyncClient, + mocked_webserver_rest_api_base: MockRouter, + mocked_storage_rest_api_base: MockRouter, + mocked_solver_job_outputs: None, + mocked_webserver_rpc_api: dict[str, MockType], + create_respx_mock_from_capture: CreateRespxMockCallback, + auth: httpx.BasicAuth, + project_tests_dir: Path, +): + def _sf( + request: httpx.Request, + path_params: dict[str, Any], + capture: HttpApiCallCaptureModel, + ) -> Any: + return capture.response_body + + create_respx_mock_from_capture( + respx_mocks=[ + mocked_webserver_rest_api_base, + mocked_storage_rest_api_base, + ], + capture_path=project_tests_dir / "mocks" / "get_solver_outputs.json", + side_effects_callbacks=[_sf, _sf, _sf, _sf, _sf], + ) + + _solver_key: Final[str] = "simcore/services/comp/isolve" + _version: Final[str] = "2.1.24" + _job_id: Final[str] = "1eefc09b-5d08-4022-bc18-33dedbbd7d0f" + response = await client.get( + f"{API_VTAG}/solvers/{_solver_key}/releases/{_version}/jobs/{_job_id}/outputs", + auth=auth, + ) + + assert response.status_code == status.HTTP_409_CONFLICT diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index a8b1de9be4d..d12c0bfa74e 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -15702,6 +15702,11 @@ components: anyOf: - $ref: '#/components/schemas/ProjectTemplateType' - type: 'null' + hidden: + anyOf: + - type: boolean + - type: 'null' + title: Hidden type: object title: ProjectPatch ProjectPermalink: diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rpc.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rpc.py index e511d4dd498..6cb359da3f6 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rpc.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rpc.py @@ -38,6 +38,7 @@ async def mark_project_as_job( user_id: UserID, project_uuid: ProjectID, job_parent_resource_name: str, + storage_assets_deleted: bool, ) -> None: try: @@ -48,6 +49,7 @@ async def mark_project_as_job( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=storage_assets_deleted, ) except ProjectInvalidRightsError as err: raise 
ProjectForbiddenRpcError.from_domain_error(err) from err @@ -97,6 +99,7 @@ async def list_projects_marked_as_jobs( created_at=project.creation_date, modified_at=project.last_change_date, job_parent_resource_name=project.job_parent_resource_name, + storage_assets_deleted=project.storage_assets_deleted, ) for project in projects ] @@ -111,6 +114,49 @@ async def list_projects_marked_as_jobs( return page +@router.expose( + reraise_if_error_type=( + ValidationError, + ProjectForbiddenRpcError, + ProjectNotFoundRpcError, + ) +) +@validate_call(config={"arbitrary_types_allowed": True}) +async def get_project_marked_as_job( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + project_uuid: ProjectID, + job_parent_resource_name: str, +) -> ProjectJobRpcGet: + + try: + project = await _jobs_service.get_project_marked_as_job( + app, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + except ProjectInvalidRightsError as err: + raise ProjectForbiddenRpcError.from_domain_error(err) from err + + except ProjectNotFoundError as err: + raise ProjectNotFoundRpcError.from_domain_error(err) from err + + return ProjectJobRpcGet( + uuid=project.uuid, + name=project.name, + description=project.description, + workbench=project.workbench, + created_at=project.creation_date, + modified_at=project.last_change_date, + job_parent_resource_name=project.job_parent_resource_name, + storage_assets_deleted=project.storage_assets_deleted, + ) + + async def register_rpc_routes_on_startup(app: web.Application): rpc_server = get_rabbitmq_rpc_server(app) await rpc_server.register_router(router, WEBSERVER_RPC_NAMESPACE, app) diff --git a/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py b/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py index dca8d1c91cc..539b1085036 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py @@ -67,6 +67,7 @@ async def set_project_as_job( *, project_uuid: ProjectID, job_parent_resource_name: str, + storage_assets_deleted: bool, ) -> None: async with transaction_context(self.engine, connection) as conn: stmt = ( @@ -74,10 +75,14 @@ async def set_project_as_job( .values( project_uuid=f"{project_uuid}", job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=storage_assets_deleted, ) .on_conflict_do_update( index_elements=["project_uuid", "job_parent_resource_name"], - set_={"job_parent_resource_name": job_parent_resource_name}, + set_={ + "job_parent_resource_name": job_parent_resource_name, + "storage_assets_deleted": storage_assets_deleted, + }, ) ) @@ -175,6 +180,7 @@ async def list_projects_marked_as_jobs( *_PROJECT_DB_COLS, projects.c.workbench, base_query.c.job_parent_resource_name, + base_query.c.storage_assets_deleted, ) .select_from( base_query.join( @@ -201,3 +207,41 @@ async def list_projects_marked_as_jobs( ) return total_count, projects_list + + async def get_project_marked_as_job( + self, + connection: AsyncConnection | None = None, + *, + project_uuid: ProjectID, + job_parent_resource_name: str, + ) -> ProjectJobDBGet | None: + """ + Returns the project associated with the given project_uuid and job_parent_resource_name + """ + query = ( + sa.select( + *_PROJECT_DB_COLS, + projects.c.workbench, + projects_to_jobs.c.job_parent_resource_name, + 
projects_to_jobs.c.storage_assets_deleted, + ) + .select_from( + projects_to_jobs.join( + projects, + projects_to_jobs.c.project_uuid == projects.c.uuid, + ) + ) + .where( + projects_to_jobs.c.project_uuid == f"{project_uuid}", + projects_to_jobs.c.job_parent_resource_name == job_parent_resource_name, + projects.c.workspace_id.is_(None), + ) + .limit(1) + ) + + async with pass_or_acquire_connection(self.engine, connection) as conn: + result = await conn.execute(query) + row = result.first() + if row is None: + return None + return TypeAdapter(ProjectJobDBGet).validate_python(row) diff --git a/services/web/server/src/simcore_service_webserver/projects/_jobs_service.py b/services/web/server/src/simcore_service_webserver/projects/_jobs_service.py index 05a26e1466c..f8f17a28ef9 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_jobs_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_jobs_service.py @@ -9,6 +9,7 @@ from ._access_rights_service import check_user_project_permission from ._jobs_repository import ProjectJobsRepository +from .exceptions import ProjectNotFoundError from .models import ProjectJobDBGet _logger = logging.getLogger(__name__) @@ -31,6 +32,7 @@ async def set_project_as_job( job_parent_resource_name: Annotated[ str, AfterValidator(_validate_job_parent_resource_name) ], + storage_assets_deleted: bool, ) -> None: await check_user_project_permission( @@ -44,7 +46,9 @@ async def set_project_as_job( repo = ProjectJobsRepository.create_from_app(app) await repo.set_project_as_job( - project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=storage_assets_deleted, ) @@ -78,3 +82,38 @@ async def list_my_projects_marked_as_jobs( filter_by_job_parent_resource_name_prefix=filter_by_job_parent_resource_name_prefix, filter_any_custom_metadata=filter_any_custom_metadata, ) + + +@validate_call(config={"arbitrary_types_allowed": True}) +async def get_project_marked_as_job( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + project_uuid: ProjectID, + job_parent_resource_name: Annotated[ + str, AfterValidator(_validate_job_parent_resource_name) + ], +) -> ProjectJobDBGet: + """ + Retrieves the project associated with the given project_uuid and job_parent_resource_name. + Raises: + ProjectNotFoundError: if no project is found. 
+ """ + await check_user_project_permission( + app, + project_id=project_uuid, + user_id=user_id, + product_name=product_name, + permission="read", + ) + repo = ProjectJobsRepository.create_from_app(app) + project_id = await repo.get_project_marked_as_job( + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + if not project_id: + raise ProjectNotFoundError( + project_uuid=project_uuid, + ) + return project_id diff --git a/services/web/server/src/simcore_service_webserver/projects/models.py b/services/web/server/src/simcore_service_webserver/projects/models.py index 23225da3d76..84bf04f8d53 100644 --- a/services/web/server/src/simcore_service_webserver/projects/models.py +++ b/services/web/server/src/simcore_service_webserver/projects/models.py @@ -81,6 +81,7 @@ class ProjectJobDBGet(ProjectDBGet): workbench: NodesDict job_parent_resource_name: str + storage_assets_deleted: bool class ProjectWithTrashExtra(ProjectDBGet): diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects__jobs_service.py b/services/web/server/tests/unit/with_dbs/02/test_projects__jobs_service.py index fa56667348a..0e44cd5e9c6 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects__jobs_service.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects__jobs_service.py @@ -56,6 +56,7 @@ async def project_job_fixture( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=False, ) return ProjectJobFixture( user_id=user_id, @@ -221,6 +222,7 @@ async def test_filter_projects_by_metadata( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=False, ) # 2. Set custom metadata diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_rpc.py b/services/web/server/tests/unit/with_dbs/02/test_projects_rpc.py index 21d8b7df223..a7ec233d29d 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_rpc.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_rpc.py @@ -90,6 +90,7 @@ async def test_rpc_client_mark_project_as_job( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name="solvers/solver123/version/1.2.3", + storage_assets_deleted=False, ) @@ -109,6 +110,7 @@ async def test_rpc_client_list_my_projects_marked_as_jobs( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name="solvers/solver123/version/1.2.3", + storage_assets_deleted=False, ) # List projects marked as jobs @@ -170,6 +172,7 @@ async def test_errors_on_rpc_client_mark_project_as_job( user_id=other_user_id, # <-- no access project_uuid=project_uuid, job_parent_resource_name="solvers/solver123/version/1.2.3", + storage_assets_deleted=False, ) assert exc_info.value.error_context()["project_uuid"] == project_uuid @@ -181,6 +184,7 @@ async def test_errors_on_rpc_client_mark_project_as_job( user_id=logged_user["id"], project_uuid=UUID("00000000-0000-0000-0000-000000000000"), # <-- wont find job_parent_resource_name="solvers/solver123/version/1.2.3", + storage_assets_deleted=False, ) with pytest.raises(ValidationError, match="job_parent_resource_name") as exc_info: @@ -190,6 +194,7 @@ async def test_errors_on_rpc_client_mark_project_as_job( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name="This is not a resource", # <-- wrong format + storage_assets_deleted=False, ) assert exc_info.value.error_count() == 1 @@ -216,6 +221,7 @@ async def 
test_rpc_client_list_projects_marked_as_jobs_with_metadata_filter( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name="solvers/solver123/version/1.2.3", + storage_assets_deleted=False, ) # Set custom metadata on the project @@ -307,3 +313,147 @@ async def test_rpc_client_list_projects_marked_as_jobs_with_metadata_filter( assert page.meta.total == 0 assert len(page.data) == 0 + + +async def test_rpc_client_get_project_marked_as_job_found( + rpc_client: RabbitMQRPCClient, + product_name: ProductName, + logged_user: UserInfoDict, + user_project: ProjectDict, +): + project_uuid = ProjectID(user_project["uuid"]) + user_id = logged_user["id"] + job_parent_resource_name = "solvers/solver123/version/1.2.3" + + # Mark the project as a job first + await projects_rpc.mark_project_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=False, + ) + + # Should be able to retrieve it + project_job = await projects_rpc.get_project_marked_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + assert project_job.uuid == project_uuid + assert project_job.job_parent_resource_name == job_parent_resource_name + assert project_job.name == user_project["name"] + + +async def test_rpc_client_get_project_marked_as_job_not_found( + rpc_client: RabbitMQRPCClient, + product_name: ProductName, + logged_user: UserInfoDict, + user_project: ProjectDict, +): + + project_uuid = ProjectID(user_project["uuid"]) + user_id = logged_user["id"] + job_parent_resource_name = "solvers/solver123/version/1.2.3" + + # Do NOT mark the project as a job, so it should not be found + with pytest.raises(ProjectNotFoundRpcError): + await projects_rpc.get_project_marked_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + + +async def test_rpc_client_get_project_marked_as_job_forbidden( + rpc_client: RabbitMQRPCClient, + product_name: ProductName, + logged_user: UserInfoDict, + other_user: UserInfoDict, + user_project: ProjectDict, +): + """ + Ensures ProjectForbiddenRpcError is raised if the user does not have read access to the project. + """ + project_uuid = ProjectID(user_project["uuid"]) + job_parent_resource_name = "solvers/solver123/version/1.2.3" + + # Mark the project as a job as the owner + await projects_rpc.mark_project_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=logged_user["id"], + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=False, + ) + + # Try to get the project as another user (should not have access) + with pytest.raises(ProjectForbiddenRpcError): + await projects_rpc.get_project_marked_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=other_user["id"], + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + + +async def test_mark_and_get_project_job_storage_assets_deleted( + rpc_client: RabbitMQRPCClient, + product_name: ProductName, + logged_user: UserInfoDict, + user_project: ProjectDict, +): + """ + Marks a project as a job with storage_assets_deleted True, checks the value, + then marks it again with storage_assets_deleted False and checks the value again. 
+ """ + project_uuid = ProjectID(user_project["uuid"]) + user_id = logged_user["id"] + job_parent_resource_name = "solvers/solver123/version/1.2.3" + + # First mark as job with storage_assets_deleted=True + await projects_rpc.mark_project_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=True, + ) + + # Retrieve and check + project_job = await projects_rpc.get_project_marked_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + assert project_job.storage_assets_deleted is True + + # Mark again as job with storage_assets_deleted=False + await projects_rpc.mark_project_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=False, + ) + + # Retrieve and check again + project_job = await projects_rpc.get_project_marked_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + assert project_job.storage_assets_deleted is False