diff --git a/packages/common-library/src/common_library/exclude.py b/packages/common-library/src/common_library/exclude.py index 7f2392dec338..e24efb998c4e 100644 --- a/packages/common-library/src/common_library/exclude.py +++ b/packages/common-library/src/common_library/exclude.py @@ -12,6 +12,10 @@ def is_unset(v: Any) -> bool: return isinstance(v, UnSet) +def is_set(v: Any) -> bool: + return not isinstance(v, UnSet) + + def as_dict_exclude_unset(**params) -> dict[str, Any]: return {k: v for k, v in params.items() if not isinstance(v, UnSet)} diff --git a/packages/models-library/src/models_library/licenses.py b/packages/models-library/src/models_library/licenses.py index c9d927ff9004..b65b7f9d6feb 100644 --- a/packages/models-library/src/models_library/licenses.py +++ b/packages/models-library/src/models_library/licenses.py @@ -26,16 +26,18 @@ class LicensedResourceType(StrAutoEnum): VIP_MODEL = auto() -VIP_FEATURES_EXAMPLE = { - "name": "Duke", - "version": "V2.0", - "sex": "Mas bien poco", +_VIP_FEATURES_EXAMPLE = { + # NOTE: this is how the example looks after parsing and validation "age": "34 years", - "weight": "70.2 Kg", - "height": "1.77 m", "date": "2015-03-01", "ethnicity": "Caucasian", "functionality": "Static", + "height": "1.77 m", + "name": "Duke", + "sex": "Male", + "version": "V2.0", + "weight": "70.2 Kg", + # other "additional_field": "allowed", } @@ -58,7 +60,7 @@ class FeaturesDict(TypedDict): "id": 1, "description": "A detailed description of the VIP model", "thumbnail": "https://example.com/thumbnail.jpg", - "features": VIP_FEATURES_EXAMPLE, + "features": _VIP_FEATURES_EXAMPLE, "doi": "10.1000/xyz123", "license_key": "ABC123XYZ", "license_version": "1.0", diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py b/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py index 1fd0f9f71d48..fc931cbebd59 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py @@ -29,8 +29,10 @@ async def assert_status( data, error = unwrap_envelope(json_response) assert response.status == expected_status_code, ( - f"received {response.status}: ({data},{error})" - f", expected {get_code_display_name(expected_status_code)} : {expected_msg or ''}" + f"Expected: {get_code_display_name(expected_status_code)} : {expected_msg or ''}\n" + f"Got: {response.status}:\n" + f" - data :{pformat(data)}\n" + f" - error:{pformat(error)}\n" ) if is_error(expected_status_code): diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 59601b8e6226..0c32076f8eb2 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -971,16 +971,15 @@ services: init: true hostname: "gc-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjunction with other services and must be unique see https://github.com/ITISFoundation/osparc-simcore/pull/5931 environment: - WEBSERVER_LOGLEVEL: ${WB_GC_LOGLEVEL} - WEBSERVER_HOST: ${WEBSERVER_HOST} - WEBSERVER_PORT: ${WEBSERVER_PORT} + # WEBSERVER_DIRECTOR_V2 + DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} + DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} - # WEBSERVER_RESOURCE_USAGE_TRACKER - RESOURCE_USAGE_TRACKER_HOST: ${RESOURCE_USAGE_TRACKER_HOST} - RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_EXTERNAL_PORT} + GUNICORN_CMD_ARGS: ${WEBSERVER_GUNICORN_CMD_ARGS} - REST_SWAGGER_API_DOC_ENABLED: ${WB_GC_REST_SWAGGER_API_DOC_ENABLED} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} + LOG_FORMAT_LOCAL_DEV_ENABLED:
${LOG_FORMAT_LOCAL_DEV_ENABLED} # WEBSERVER_DB POSTGRES_DB: ${POSTGRES_DB} @@ -990,47 +989,61 @@ services: POSTGRES_PORT: ${POSTGRES_PORT} POSTGRES_USER: ${POSTGRES_USER} - DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} - DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} - - GUNICORN_CMD_ARGS: ${WEBSERVER_GUNICORN_CMD_ARGS} - - LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} - - STORAGE_HOST: ${STORAGE_HOST} - STORAGE_PORT: ${STORAGE_PORT} + # WEBSERVER_RABBITMQ + RABBIT_HOST: ${RABBIT_HOST} + RABBIT_PASSWORD: ${RABBIT_PASSWORD} + RABBIT_PORT: ${RABBIT_PORT} + RABBIT_SECURE: ${RABBIT_SECURE} + RABBIT_USER: ${RABBIT_USER} + # WEBSERVER_REDIS REDIS_HOST: ${REDIS_HOST} + REDIS_PASSWORD: ${REDIS_PASSWORD} REDIS_PORT: ${REDIS_PORT} REDIS_SECURE: ${REDIS_SECURE} REDIS_USER: ${REDIS_USER} - REDIS_PASSWORD: ${REDIS_PASSWORD} - - SWARM_STACK_NAME: ${SWARM_STACK_NAME} - WEBSERVER_DB_LISTENER: ${WB_GC_DB_LISTENER} + # WEBSERVER_RESOURCE_MANAGER + RESOURCE_MANAGER_RESOURCE_TTL_S: ${WB_GC_RESOURCE_MANAGER_RESOURCE_TTL_S} - WEBSERVER_GARBAGE_COLLECTOR: ${WB_GC_GARBAGE_COLLECTOR} + # WEBSERVER_RESOURCE_USAGE_TRACKER + RESOURCE_USAGE_TRACKER_HOST: ${RESOURCE_USAGE_TRACKER_HOST} + RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_EXTERNAL_PORT} - RESOURCE_MANAGER_RESOURCE_TTL_S: ${WB_GC_RESOURCE_MANAGER_RESOURCE_TTL_S} + REST_SWAGGER_API_DOC_ENABLED: ${WB_GC_REST_SWAGGER_API_DOC_ENABLED} + # WEBSERVER_SESSION SESSION_SECRET_KEY: ${WEBSERVER_SESSION_SECRET_KEY} + + # WEBSERVER_STORAGE + STORAGE_HOST: ${STORAGE_HOST} + STORAGE_PORT: ${STORAGE_PORT} + + SWARM_STACK_NAME: ${SWARM_STACK_NAME} + + # WEBSERVER_TRASH + TRASH_RETENTION_DAYS: ${TRASH_RETENTION_DAYS} + WEBSERVER_ACTIVITY: ${WB_GC_ACTIVITY} WEBSERVER_ANNOUNCEMENTS: ${WB_GC_ANNOUNCEMENTS} WEBSERVER_CATALOG: ${WB_GC_CATALOG} WEBSERVER_CLUSTERS: ${WB_GC_CLUSTERS} + WEBSERVER_DB_LISTENER: ${WB_GC_DB_LISTENER} WEBSERVER_DIAGNOSTICS: ${WB_GC_DIAGNOSTICS} WEBSERVER_EMAIL: ${WB_GC_EMAIL} WEBSERVER_EXPORTER: ${WB_GC_EXPORTER} WEBSERVER_FOLDERS: ${WB_GC_FOLDERS} WEBSERVER_FRONTEND: ${WB_GC_FRONTEND} + WEBSERVER_GARBAGE_COLLECTOR: ${WB_GC_GARBAGE_COLLECTOR} WEBSERVER_GROUPS: ${WB_GC_GROUPS} + WEBSERVER_HOST: ${WEBSERVER_HOST} WEBSERVER_INVITATIONS: ${WB_GC_INVITATIONS} WEBSERVER_LICENSES: null WEBSERVER_LOGIN: ${WB_GC_LOGIN} + WEBSERVER_LOGLEVEL: ${WB_GC_LOGLEVEL} WEBSERVER_NOTIFICATIONS: ${WB_GC_NOTIFICATIONS} WEBSERVER_PAYMENTS: ${WB_GC_PAYMENTS} + WEBSERVER_PORT: ${WEBSERVER_PORT} WEBSERVER_PRODUCTS: ${WB_GC_PRODUCTS} WEBSERVER_PROJECTS: ${WB_GC_PROJECTS} WEBSERVER_PUBLICATIONS: ${WB_GC_PUBLICATIONS} @@ -1043,12 +1056,7 @@ services: WEBSERVER_USERS: ${WB_GC_USERS} WEBSERVER_WALLETS: ${WB_GC_WALLETS} - # WEBSERVER_RABBITMQ - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} + networks: - default - interactive_services_subnet diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py b/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py index 0d2842c878ed..31ce1b0fc8e0 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py @@ -1,10 +1,11 @@ import logging +from collections.abc import Callable from datetime import datetime -from typing import Final, cast +from typing import cast import sqlalchemy as sa from aiohttp import web 
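# --- Editor's note (aside, not part of the diff) --------------------------
# The repository module below starts using the UnSet sentinel from
# common_library.exclude, extended with is_set() at the top of this diff.
# A minimal, self-contained sketch of the pattern, assuming UnSet is an
# enum-based sentinel (as the UnSet.VALUE usage in this PR suggests); the
# list_folders_filters() helper is hypothetical, for illustration only:
from enum import Enum
from typing import Any

class UnSet(Enum):
    VALUE = "unset"

def is_set(v: Any) -> bool:
    return not isinstance(v, UnSet)

def as_dict_exclude_unset(**params) -> dict[str, Any]:
    # keeps only the arguments the caller actually passed
    return {k: v for k, v in params.items() if not isinstance(v, UnSet)}

def list_folders_filters(*, trashed_explicitly: bool | UnSet = UnSet.VALUE) -> dict[str, Any]:
    return as_dict_exclude_unset(trashed_explicitly=trashed_explicitly)

assert list_folders_filters() == {}
assert list_folders_filters(trashed_explicitly=False) == {"trashed_explicitly": False}
# Unlike an Optional[...] default, this distinguishes "filter not given"
# from "filter explicitly set to None/False".
# ---------------------------------------------------------------------------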
-from common_library.exclude import UnSet, as_dict_exclude_unset +from common_library.exclude import UnSet, as_dict_exclude_unset, is_set from models_library.folders import ( FolderDB, FolderID, @@ -31,18 +32,16 @@ from simcore_postgres_database.utils_workspaces_sql import ( create_my_workspace_access_rights_subquery, ) -from sqlalchemy import func +from sqlalchemy import sql from sqlalchemy.ext.asyncio import AsyncConnection from sqlalchemy.orm import aliased -from sqlalchemy.sql import ColumnElement, CompoundSelect, Select, asc, desc, select +from sqlalchemy.sql import ColumnElement, CompoundSelect, Select from ..db.plugin import get_asyncpg_engine from .errors import FolderAccessForbiddenError, FolderNotFoundError _logger = logging.getLogger(__name__) -_unset: Final = UnSet() - _FOLDER_DB_MODEL_COLS = get_columns_from_db_model(folders_v2, FolderDB) @@ -72,8 +71,8 @@ async def create( user_id=user_id, workspace_id=workspace_id, created_by_gid=created_by_gid, - created=func.now(), - modified=func.now(), + created=sql.func.now(), + modified=sql.func.now(), ) .returning(*_FOLDER_DB_MODEL_COLS) ) @@ -92,9 +91,11 @@ def _create_private_workspace_query( WorkspaceScope.ALL, ) return ( - select( + sql.select( *_FOLDER_DB_MODEL_COLS, - func.json_build_object( + # NOTE: design INVARIANT: + # a user owns the folders in their private workspace + sql.func.json_build_object( "read", sa.text("true"), "write", @@ -129,8 +130,10 @@ def _create_shared_workspace_query( ) shared_workspace_query = ( - select( + sql.select( *_FOLDER_DB_MODEL_COLS, + # NOTE: design INVARIANT: + # a user's access rights to a folder in a SHARED workspace are inherited from the workspace workspace_access_rights_subquery.c.my_access_rights, ) .select_from( @@ -157,6 +160,14 @@ def _create_shared_workspace_query( return shared_workspace_query +def _to_sql_expression(table: sa.Table, order_by: OrderBy): + direction_func: Callable = { + OrderDirection.ASC: sql.asc, + OrderDirection.DESC: sql.desc, + }[order_by.direction] + return direction_func(table.columns[order_by.field]) + + async def list_( # pylint: disable=too-many-arguments,too-many-branches app: web.Application, connection: AsyncConnection | None = None, @@ -234,16 +245,14 @@ async def list_( # pylint: disable=too-many-arguments,too-many-branches raise ValueError(msg) # Select total count from base_query - count_query = select(func.count()).select_from(combined_query.subquery()) + count_query = sql.select(sql.func.count()).select_from(combined_query.subquery()) # Ordering and pagination - if order_by.direction == OrderDirection.ASC: - list_query = combined_query.order_by(asc(getattr(folders_v2.c, order_by.field))) - else: - list_query = combined_query.order_by( - desc(getattr(folders_v2.c, order_by.field)) - ) - list_query = list_query.offset(offset).limit(limit) + list_query = ( + combined_query.order_by(_to_sql_expression(folders_v2, order_by)) + .offset(offset) + .limit(limit) + ) async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: total_count = await conn.scalar(count_query) @@ -255,8 +264,56 @@ async def list_( # pylint: disable=too-many-arguments,too-many-branches return cast(int, total_count), folders +async def list_trashed_folders( + app: web.Application, + connection: AsyncConnection | None = None, + *, + # filter + trashed_explicitly: bool | UnSet = UnSet.VALUE, + trashed_before: datetime | UnSet = UnSet.VALUE, + # pagination + offset: NonNegativeInt, + limit: int, + # order + order_by: OrderBy, +) -> tuple[int, list[FolderDB]]: + """ +
NOTE: this query is app-wide, i.e. results are not filtered by product, user or workspace """ + base_query = sql.select(*_FOLDER_DB_MODEL_COLS).where( + folders_v2.c.trashed.is_not(None) + ) + + if is_set(trashed_explicitly): + assert isinstance(trashed_explicitly, bool) # nosec + base_query = base_query.where( + folders_v2.c.trashed_explicitly.is_(trashed_explicitly) + ) + + if is_set(trashed_before): + assert isinstance(trashed_before, datetime) # nosec + base_query = base_query.where(folders_v2.c.trashed < trashed_before) + + # Select total count from base_query + count_query = sql.select(sql.func.count()).select_from(base_query.subquery()) + + # Ordering and pagination + list_query = ( + base_query.order_by(_to_sql_expression(folders_v2, order_by)) + .offset(offset) + .limit(limit) + ) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + total_count = await conn.scalar(count_query) + + result = await conn.stream(list_query) + folders: list[FolderDB] = [FolderDB.model_validate(row) async for row in result] + return cast(int, total_count), folders + + def _create_base_select_query(folder_id: FolderID, product_name: ProductName) -> Select: - return select(*_FOLDER_DB_MODEL_COLS,).where( + return sql.select(*_FOLDER_DB_MODEL_COLS,).where( (folders_v2.c.product_name == product_name) & (folders_v2.c.folder_id == folder_id) ) @@ -343,7 +400,7 @@ async def update( ) query = ( - (folders_v2.update().values(modified=func.now(), **updated)) + (folders_v2.update().values(modified=sql.func.now(), **updated)) .where(folders_v2.c.product_name == product_name) .returning(*_FOLDER_DB_MODEL_COLS) ) @@ -372,7 +429,7 @@ async def delete_recursively( ) -> None: async with transaction_context(get_asyncpg_engine(app), connection) as conn: # Step 1: Define the base case for the recursive CTE - base_query = select( + base_query = sql.select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id ).where( (folders_v2.c.folder_id == folder_id) # <-- specified folder id @@ -382,7 +439,7 @@ # Step 2: Define the recursive case folder_alias = aliased(folders_v2) - recursive_query = select( + recursive_query = sql.select( folder_alias.c.folder_id, folder_alias.c.parent_folder_id ).select_from( folder_alias.join( @@ -395,7 +452,7 @@ folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) # Step 4: Execute the query to get all descendants - final_query = select(folder_hierarchy_cte) + final_query = sql.select(folder_hierarchy_cte) result = await conn.stream(final_query) # list of tuples [(folder_id, parent_folder_id), ...] ex.
[(1, None), (2, 1)] rows = [row async for row in result] @@ -410,6 +467,24 @@ async def delete_recursively( ) +def _create_folder_hierarchy_cte(base_query: Select): + folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True) + + # Step 2: Define the recursive case + folder_alias = aliased(folders_v2) + recursive_query = sql.select( + folder_alias.c.folder_id, folder_alias.c.parent_folder_id + ).select_from( + folder_alias.join( + folder_hierarchy_cte, + folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id, + ) + ) + + # Step 3: Combine base and recursive cases into a CTE + return folder_hierarchy_cte.union_all(recursive_query) + + async def get_projects_recursively_only_if_user_is_owner( app: web.Application, connection: AsyncConnection | None = None, @@ -430,36 +505,24 @@ async def get_projects_recursively_only_if_user_is_owner( async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: # Step 1: Define the base case for the recursive CTE - base_query = select( + base_query = sql.select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id ).where( (folders_v2.c.folder_id == folder_id) # <-- specified folder id & (folders_v2.c.product_name == product_name) ) - folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True) - - # Step 2: Define the recursive case - folder_alias = aliased(folders_v2) - recursive_query = select( - folder_alias.c.folder_id, folder_alias.c.parent_folder_id - ).select_from( - folder_alias.join( - folder_hierarchy_cte, - folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id, - ) - ) - # Step 3: Combine base and recursive cases into a CTE - folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) + # Step 2,3 + folder_hierarchy_cte = _create_folder_hierarchy_cte(base_query) # Step 4: Execute the query to get all descendants - final_query = select(folder_hierarchy_cte) + final_query = sql.select(folder_hierarchy_cte) result = await conn.stream(final_query) # list of tuples [(folder_id, parent_folder_id), ...] ex. 
[(1, None), (2, 1)] folder_ids = [item[0] async for item in result] query = ( - select(projects_to_folders.c.project_uuid) + sql.select(projects_to_folders.c.project_uuid) .join(projects) .where( (projects_to_folders.c.folder_id.in_(folder_ids)) @@ -488,35 +551,23 @@ async def get_all_folders_and_projects_ids_recursively( async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: # Step 1: Define the base case for the recursive CTE - base_query = select( + base_query = sql.select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id ).where( (folders_v2.c.folder_id == folder_id) # <-- specified folder id & (folders_v2.c.product_name == product_name) ) - folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True) - - # Step 2: Define the recursive case - folder_alias = aliased(folders_v2) - recursive_query = select( - folder_alias.c.folder_id, folder_alias.c.parent_folder_id - ).select_from( - folder_alias.join( - folder_hierarchy_cte, - folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id, - ) - ) - # Step 3: Combine base and recursive cases into a CTE - folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) + # Step 2, 3 + folder_hierarchy_cte = _create_folder_hierarchy_cte(base_query) # Step 4: Execute the query to get all descendants - final_query = select(folder_hierarchy_cte) + final_query = sql.select(folder_hierarchy_cte) result = await conn.stream(final_query) # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)] folder_ids = [item.folder_id async for item in result] - query = select(projects_to_folders.c.project_uuid).where( + query = sql.select(projects_to_folders.c.project_uuid).where( (projects_to_folders.c.folder_id.in_(folder_ids)) & (projects_to_folders.c.user_id == private_workspace_user_id_or_none) ) @@ -537,36 +588,24 @@ async def get_folders_recursively( async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: # Step 1: Define the base case for the recursive CTE - base_query = select( + base_query = sql.select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id ).where( (folders_v2.c.folder_id == folder_id) # <-- specified folder id & (folders_v2.c.product_name == product_name) ) - folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True) - - # Step 2: Define the recursive case - folder_alias = aliased(folders_v2) - recursive_query = select( - folder_alias.c.folder_id, folder_alias.c.parent_folder_id - ).select_from( - folder_alias.join( - folder_hierarchy_cte, - folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id, - ) - ) - # Step 3: Combine base and recursive cases into a CTE - folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) + # Step 2, 3 + folder_hierarchy_cte = _create_folder_hierarchy_cte(base_query) # Step 4: Execute the query to get all descendants - final_query = select(folder_hierarchy_cte) + final_query = sql.select(folder_hierarchy_cte) result = await conn.stream(final_query) return cast(list[FolderID], [row.folder_id async for row in result]) def _select_trashed_by_primary_gid_query(): - return sa.select( + return sa.sql.select( folders_v2.c.folder_id, users.c.primary_gid.label("trashed_by_primary_gid"), ).select_from( diff --git a/services/web/server/src/simcore_service_webserver/folders/_trash_service.py b/services/web/server/src/simcore_service_webserver/folders/_trash_service.py index 4b5c6f3cdf00..f5003ee3dba1 100644 --- 
a/services/web/server/src/simcore_service_webserver/folders/_trash_service.py +++ b/services/web/server/src/simcore_service_webserver/folders/_trash_service.py @@ -19,7 +19,7 @@ from ..projects._trash_service import trash_project, untrash_project from ..workspaces.api import check_user_workspace_access from . import _folders_repository, _folders_service -from .errors import FolderNotTrashedError +from .errors import FolderBatchDeleteError, FolderNotTrashedError _logger = logging.getLogger(__name__) @@ -227,7 +227,7 @@ async def list_explicitly_trashed_folders( user_id=user_id, product_name=product_name, text=None, - trashed=True, + trashed=True, # NOTE: lists only explicitly trashed! offset=page_params.offset, limit=page_params.limit, order_by=OrderBy(field=IDStr("trashed"), direction=OrderDirection.ASC), @@ -278,3 +278,49 @@ async def delete_trashed_folder( await _folders_service.delete_folder( app, user_id=user_id, folder_id=folder_id, product_name=product_name ) + + +async def batch_delete_trashed_folders_as_admin( + app: web.Application, + trashed_before: datetime, + *, + product_name: ProductName, + fail_fast: bool, +) -> None: + """ + Raises: + FolderBatchDeleteError: raised at the end with all collected errors if fail_fast=False + Exception: the first exception raised by delete_recursively if fail_fast=True + """ + errors: list[tuple[FolderID, Exception]] = [] + + for page_params in iter_pagination_params(limit=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE): + ( + page_params.total_number_of_items, + expired_trashed_folders, + ) = await _folders_repository.list_trashed_folders( + app, + trashed_explicitly=True, + trashed_before=trashed_before, + offset=page_params.offset, + limit=page_params.limit, + order_by=OrderBy(field=IDStr("trashed"), direction=OrderDirection.ASC), + ) + + # BATCH delete + for folder in expired_trashed_folders: + try: + await _folders_repository.delete_recursively( + app, folder_id=folder.folder_id, product_name=product_name + ) + # NOTE: projects in folders are NOT deleted + + except Exception as err: # pylint: disable=broad-exception-caught + if fail_fast: + raise + errors.append((folder.folder_id, err)) + + if errors: + raise FolderBatchDeleteError( + errors=errors, trashed_before=trashed_before, product_name=product_name + ) diff --git a/services/web/server/src/simcore_service_webserver/folders/errors.py b/services/web/server/src/simcore_service_webserver/folders/errors.py index 6dc9b93cc54c..e8f2e346868b 100644 --- a/services/web/server/src/simcore_service_webserver/folders/errors.py +++ b/services/web/server/src/simcore_service_webserver/folders/errors.py @@ -29,3 +29,7 @@ class FolderNotTrashedError(FoldersRuntimeError): msg_template = ( "Cannot delete folder {folder_id} since it was not trashed first: {reason}" ) + + +class FolderBatchDeleteError(FoldersRuntimeError): + msg_template = "One or more folders could not be deleted: {errors}" diff --git a/services/web/server/src/simcore_service_webserver/folders/folders_trash_service.py b/services/web/server/src/simcore_service_webserver/folders/folders_trash_service.py index 9d1b012731e9..505f72f257f3 100644 --- a/services/web/server/src/simcore_service_webserver/folders/folders_trash_service.py +++ b/services/web/server/src/simcore_service_webserver/folders/folders_trash_service.py @@ -1,6 +1,11 @@ -from ._trash_service import delete_trashed_folder, list_explicitly_trashed_folders +from ._trash_service import ( + batch_delete_trashed_folders_as_admin, + delete_trashed_folder, + list_explicitly_trashed_folders, +) __all__: tuple[str, ...]
= ( + "batch_delete_trashed_folders_as_admin", "delete_trashed_folder", "list_explicitly_trashed_folders", ) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py index 47d5e7212f23..46df72c0a708 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py @@ -8,11 +8,12 @@ from collections.abc import AsyncIterator, Callable from aiohttp import web +from servicelib.logging_utils import log_context from tenacity import retry from tenacity.before_sleep import before_sleep_log from tenacity.wait import wait_exponential -from ..trash._service import prune_trash +from ..trash import trash_service _logger = logging.getLogger(__name__) @@ -28,11 +29,8 @@ before_sleep=before_sleep_log(_logger, logging.WARNING), ) async def _run_task(app: web.Application): - if deleted := await prune_trash(app): - for name in deleted: - _logger.info("Trash item %s expired and was deleted", f"{name}") - else: - _logger.info("No trash items expired") + with log_context(_logger, logging.INFO, "Deleting expired trashed items"): + await trash_service.safe_delete_expired_trash_as_admin(app) async def _run_periodically(app: web.Application, wait_interval_s: float): diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py b/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py index 3e76c6c947ce..7374eccd5611 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py @@ -3,17 +3,13 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.logging_utils import set_parent_module_log_level -from simcore_service_webserver.garbage_collector._tasks_trash import ( - create_background_task_to_prune_trash, -) +from simcore_service_webserver.products.plugin import setup_products from ..application_settings import get_application_settings from ..login.plugin import setup_login_storage from ..projects.db import setup_projects_db from ..socketio.plugin import setup_socketio -from ._tasks_api_keys import create_background_task_to_prune_api_keys -from ._tasks_core import run_background_task -from ._tasks_users import create_background_task_for_trial_accounts +from . 
import _tasks_api_keys, _tasks_core, _tasks_trash, _tasks_users from .settings import get_plugin_settings _logger = logging.getLogger(__name__) @@ -26,6 +22,9 @@ logger=_logger, ) def setup_garbage_collector(app: web.Application) -> None: + # for trashing + setup_products(app) + # - project-api needs access to db setup_projects_db(app) # - project needs access to socketio via notify_project_state_update @@ -35,7 +34,7 @@ settings = get_plugin_settings(app) - app.cleanup_ctx.append(run_background_task) + app.cleanup_ctx.append(_tasks_core.run_background_task) set_parent_module_log_level( _logger.name, min(logging.INFO, get_application_settings(app).log_level) ) @@ -48,10 +47,17 @@ # If more tasks of this nature are needed, we should set up some sort of registration mechanism # with an interface such that plugins can pass tasks to the GC plugin to handle them interval_s = settings.GARBAGE_COLLECTOR_EXPIRED_USERS_CHECK_INTERVAL_S - app.cleanup_ctx.append(create_background_task_for_trial_accounts(interval_s)) + app.cleanup_ctx.append( + _tasks_users.create_background_task_for_trial_accounts(interval_s) + ) # SEE https://github.com/ITISFoundation/osparc-issues/issues/705 wait_period_s = settings.GARBAGE_COLLECTOR_PRUNE_APIKEYS_INTERVAL_S - app.cleanup_ctx.append(create_background_task_to_prune_api_keys(wait_period_s)) + app.cleanup_ctx.append( + _tasks_api_keys.create_background_task_to_prune_api_keys(wait_period_s) + ) - app.cleanup_ctx.append(create_background_task_to_prune_trash(wait_period_s)) + # SEE https://github.com/ITISFoundation/osparc-issues#468 + app.cleanup_ctx.append( + _tasks_trash.create_background_task_to_prune_trash(wait_period_s) + ) diff --git a/services/web/server/src/simcore_service_webserver/products/_api.py b/services/web/server/src/simcore_service_webserver/products/_api.py index ce2c03b87966..5b7a3532ea00 100644 --- a/services/web/server/src/simcore_service_webserver/products/_api.py +++ b/services/web/server/src/simcore_service_webserver/products/_api.py @@ -42,6 +42,12 @@ def list_products(app: web.Application) -> list[Product]: return products +async def list_products_names(app: web.Application) -> list[ProductName]: + repo = ProductRepository.create_from_app(app) + names: list[ProductName] = await repo.list_products_names() + return names + + async def get_current_product_credit_price_info( request: web.Request, ) -> ProductPriceInfo | None: diff --git a/services/web/server/src/simcore_service_webserver/products/_db.py b/services/web/server/src/simcore_service_webserver/products/_db.py index a481c0f993e1..311d90bba069 100644 --- a/services/web/server/src/simcore_service_webserver/products/_db.py +++ b/services/web/server/src/simcore_service_webserver/products/_db.py @@ -86,6 +86,13 @@ async def iter_products(conn: SAConnection) -> AsyncIterator[ResultProxy]: class ProductRepository(BaseRepository): + async def list_products_names(self) -> list[ProductName]: + async with self.engine.acquire() as conn: + query = sa.select(products.c.name).order_by(products.c.priority) + result = await conn.execute(query) + rows = await result.fetchall() + return [ProductName(row.name) for row in rows] + async def get_product(self, product_name: str) -> Product | None: async with self.engine.acquire() as conn: result: ResultProxy = await conn.execute( diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_db.py
b/services/web/server/src/simcore_service_webserver/projects/_projects_db.py index c9571de658d7..8787699fe832 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_db.py @@ -1,9 +1,17 @@ import logging +from collections.abc import Callable +from datetime import datetime +from typing import cast import sqlalchemy as sa from aiohttp import web +from common_library.exclude import UnSet, is_set +from models_library.basic_types import IDStr from models_library.groups import GroupID from models_library.projects import ProjectID +from models_library.rest_ordering import OrderBy, OrderDirection +from models_library.rest_pagination import MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE +from pydantic import NonNegativeInt, PositiveInt from simcore_postgres_database.models.projects import projects from simcore_postgres_database.models.users import users from simcore_postgres_database.utils_repos import ( @@ -21,7 +29,7 @@ _logger = logging.getLogger(__name__) -PROJECT_DB_COLS = get_columns_from_db_model( # noqa: RUF012 +PROJECT_DB_COLS = get_columns_from_db_model( # NOTE: MD: I intentionally didn't include the workbench. There is a special interface # for the workbench, and at some point, this column should be removed from the table. # The same holds true for access_rights/ui/classifiers/quality, but we have decided to proceed step by step. @@ -29,30 +37,80 @@ ProjectDBGet, ) +_OLDEST_TRASHED_FIRST = OrderBy(field=IDStr("trashed"), direction=OrderDirection.ASC) -async def patch_project( + +def _to_sql_expression(table: sa.Table, order_by: OrderBy): + direction_func: Callable = { + OrderDirection.ASC: sql.asc, + OrderDirection.DESC: sql.desc, + }[order_by.direction] + return direction_func(table.columns[order_by.field]) + + +async def list_trashed_projects( + app: web.Application, + connection: AsyncConnection | None = None, + *, + # filter + trashed_explicitly: bool | UnSet = UnSet.VALUE, + trashed_before: datetime | UnSet = UnSet.VALUE, + # pagination + offset: NonNegativeInt = 0, + limit: PositiveInt = MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, + # order + order_by: OrderBy = _OLDEST_TRASHED_FIRST, +) -> tuple[int, list[ProjectDBGet]]: + + base_query = sql.select(*PROJECT_DB_COLS).where(projects.c.trashed.is_not(None)) + + if is_set(trashed_explicitly): + assert isinstance(trashed_explicitly, bool) # nosec + base_query = base_query.where( + projects.c.trashed_explicitly.is_(trashed_explicitly) + ) + + if is_set(trashed_before): + assert isinstance(trashed_before, datetime) # nosec + base_query = base_query.where(projects.c.trashed < trashed_before) + + # Select total count from base_query + count_query = sql.select(sql.func.count()).select_from(base_query.subquery()) + + # Ordering and pagination + list_query = ( + base_query.order_by(_to_sql_expression(projects, order_by)) + .offset(offset) + .limit(limit) + ) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + total_count = await conn.scalar(count_query) + + result = await conn.stream(list_query) + projects_list: list[ProjectDBGet] = [ + ProjectDBGet.model_validate(row) async for row in result + ] + return cast(int, total_count), projects_list + + +async def get_project( app: web.Application, connection: AsyncConnection | None = None, *, project_uuid: ProjectID, - new_partial_project_data: dict, ) -> ProjectDBGet: - - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream(
projects.update() - .values(last_change_date=sa.func.now(), **new_partial_project_data) - .where(projects.c.uuid == f"{project_uuid}") - .returning(*PROJECT_DB_COLS) - ) - row = await result.first() + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + query = sql.select(*PROJECT_DB_COLS).where(projects.c.uuid == f"{project_uuid}") + result = await conn.execute(query) + row = result.one_or_none() if row is None: raise ProjectNotFoundError(project_uuid=project_uuid) return ProjectDBGet.model_validate(row) def _select_trashed_by_primary_gid_query() -> sql.Select: - return sa.select( + return sql.select( projects.c.uuid, users.c.primary_gid.label("trashed_by_primary_gid"), ).select_from(projects.outerjoin(users, projects.c.trashed_by == users.c.id)) @@ -65,12 +123,12 @@ async def get_trashed_by_primary_gid( projects_uuid: ProjectID, ) -> GroupID | None: query = _select_trashed_by_primary_gid_query().where( - projects.c.uuid == projects_uuid + projects.c.uuid == f"{projects_uuid}" ) async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: result = await conn.execute(query) - row = result.first() + row = result.one_or_none() return row.trashed_by_primary_gid if row else None @@ -97,7 +155,7 @@ async def batch_get_trashed_by_primary_gid( ).order_by( # Preserves the order of folders_ids # SEE https://docs.sqlalchemy.org/en/20/core/sqlelement.html#sqlalchemy.sql.expression.case - sa.case( + sql.case( { project_uuid: index for index, project_uuid in enumerate(projects_uuids_str) @@ -109,4 +167,45 @@ async def batch_get_trashed_by_primary_gid( result = await conn.stream(query) rows = {row.uuid: row.trashed_by_primary_gid async for row in result} - return [rows.get(uuid) for uuid in projects_uuids_str] + return [rows.get(project_uuid) for project_uuid in projects_uuids_str] + + +async def patch_project( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_uuid: ProjectID, + new_partial_project_data: dict, +) -> ProjectDBGet: + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream( + projects.update() + .values( + **new_partial_project_data, + last_change_date=sql.func.now(), + ) + .where(projects.c.uuid == f"{project_uuid}") + .returning(*PROJECT_DB_COLS) + ) + row = await result.one_or_none() + if row is None: + raise ProjectNotFoundError(project_uuid=project_uuid) + return ProjectDBGet.model_validate(row) + + +async def delete_project( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_uuid: ProjectID, +) -> ProjectDBGet: + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream( + projects.delete() + .where(projects.c.uuid == f"{project_uuid}") + .returning(*PROJECT_DB_COLS) + ) + row = await result.one_or_none() + if row is None: + raise ProjectNotFoundError(project_uuid=project_uuid) + return ProjectDBGet.model_validate(row) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py new file mode 100644 index 000000000000..bd021a4810ff --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py @@ -0,0 +1,108 @@ +import asyncio +import logging +import time +from contextlib import contextmanager +from typing import Any, Protocol + +from aiohttp import web +from models_library.projects import 
ProjectID +from models_library.users import UserID +from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from servicelib.redis._errors import ProjectLockError + +from ..director_v2 import api as director_v2_service +from . import _projects_db as _projects_repository +from . import projects_service +from .exceptions import ProjectDeleteError, ProjectNotFoundError + +_logger = logging.getLogger(__name__) + + +@contextmanager +def _monitor_step(steps: dict[str, Any], *, name: str, elapsed: bool = False): + # util + start_time = time.perf_counter() + steps[name] = {"status": "starting"} + try: + yield + except Exception as exc: + steps[name]["status"] = "raised" + steps[name]["exception"] = f"{exc.__class__.__name__}:{exc}" + raise + else: + steps[name]["status"] = "success" + finally: + if elapsed: + steps[name]["elapsed"] = time.perf_counter() - start_time + + +class StopServicesCallback(Protocol): + async def __call__(self, app: web.Application, project_uuid: ProjectID) -> None: + ... + + +async def batch_stop_services_in_project( + app: web.Application, *, user_id: UserID, project_uuid: ProjectID +) -> None: + await asyncio.gather( + director_v2_service.stop_pipeline( + app, user_id=user_id, project_id=project_uuid + ), + projects_service.remove_project_dynamic_services( + user_id=user_id, + project_uuid=f"{project_uuid}", + app=app, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + notify_users=False, + ), + ) + + +async def delete_project_as_admin( + app: web.Application, + *, + project_uuid: ProjectID, +): + + state: dict[str, Any] = {} + + try: + # 1. hide + with _monitor_step(state, name="hide"): + project = await _projects_repository.patch_project( + app, + project_uuid=project_uuid, + new_partial_project_data={"hidden": True}, + ) + + # 2. stop + with _monitor_step(state, name="stop", elapsed=True): + # NOTE: this callback could take long or raise whatever! + await batch_stop_services_in_project( + app, user_id=project.prj_owner, project_uuid=project_uuid + ) + + # 3. delete + with _monitor_step(state, name="delete"): + await _projects_repository.delete_project(app, project_uuid=project_uuid) + + except ProjectNotFoundError as err: + _logger.debug( + "Project %s being deleted is already gone. IGNORING error. Details: %s", + project_uuid, + err, + ) + + except ProjectLockError as err: + raise ProjectDeleteError( + project_uuid=project_uuid, + reason=f"Cannot delete project {project_uuid} because it is currently in use. Details: {err}", + state=state, + ) from err + + except Exception as err: + raise ProjectDeleteError( + project_uuid=project_uuid, + reason=f"Unexpected error. 
Deletion sequence: {state=}", + state=state, + ) from err diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_service.py b/services/web/server/src/simcore_service_webserver/projects/_trash_service.py index df0e342946dc..1ec1c81d5341 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_trash_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_trash_service.py @@ -1,4 +1,3 @@ -import asyncio import logging from datetime import datetime @@ -12,17 +11,20 @@ from models_library.rest_pagination import MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE from models_library.users import UserID from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY -from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from servicelib.utils import fire_and_forget_task from ..director_v2 import api as director_v2_api from ..dynamic_scheduler import api as dynamic_scheduler_api -from . import _crud_api_read, projects_service +from . import _crud_api_read +from . import _projects_db as _projects_repository +from . import _projects_service_delete, projects_service from ._access_rights_api import check_user_project_permission +from ._projects_db import _OLDEST_TRASHED_FIRST from .exceptions import ( ProjectNotFoundError, ProjectNotTrashedError, ProjectRunningConflictError, + ProjectsBatchDeleteError, ) from .models import ProjectDict, ProjectPatchInternalExtended @@ -71,22 +73,10 @@ async def trash_project( if force_stop_first: - async def _schedule(): - await asyncio.gather( - director_v2_api.stop_pipeline( - app, user_id=user_id, project_id=project_id - ), - projects_service.remove_project_dynamic_services( - user_id=user_id, - project_uuid=f"{project_id}", - app=app, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - notify_users=False, - ), - ) - fire_and_forget_task( - _schedule(), + _projects_service_delete.batch_stop_services_in_project( + app, user_id=user_id, project_uuid=project_id + ), task_suffix_name=f"trash_project_force_stop_first_{user_id=}_{project_id=}", fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY], ) @@ -237,3 +227,51 @@ async def delete_explicitly_trashed_project( user_id=user_id, project_uuid=project_id, ) + + +async def batch_delete_trashed_projects_as_admin( + app: web.Application, + *, + trashed_before: datetime, + fail_fast: bool, +) -> list[ProjectID]: + + deleted_project_ids: list[ProjectID] = [] + errors: list[tuple[ProjectID, Exception]] = [] + + for page_params in iter_pagination_params(limit=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE): + ( + page_params.total_number_of_items, + expired_trashed_projects, + ) = await _projects_repository.list_trashed_projects( + app, + # both implicit and explicitly trashed + trashed_before=trashed_before, + offset=page_params.offset, + limit=page_params.limit, + order_by=_OLDEST_TRASHED_FIRST, + ) + # BATCH delete + for project in expired_trashed_projects: + + assert project.trashed # nosec + + try: + await _projects_service_delete.delete_project_as_admin( + app, + project_uuid=project.uuid, + ) + deleted_project_ids.append(project.uuid) + except Exception as err: # pylint: disable=broad-exception-caught + if fail_fast: + raise + errors.append((project.uuid, err)) + + if errors: + raise ProjectsBatchDeleteError( + errors=errors, + trashed_before=trashed_before, + deleted_project_ids=deleted_project_ids, + ) + + return deleted_project_ids diff --git a/services/web/server/src/simcore_service_webserver/projects/exceptions.py 
b/services/web/server/src/simcore_service_webserver/projects/exceptions.py index 0cec5bb84b66..8c270f99df5f 100644 --- a/services/web/server/src/simcore_service_webserver/projects/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/exceptions.py @@ -79,6 +79,10 @@ def __init__(self, *, project_uuid, reason, **ctx): self.reason = reason +class ProjectsBatchDeleteError(BaseProjectError): + msg_template = "One or more projects could not be deleted in the batch: {errors}" + + class ProjectTrashError(BaseProjectError): ... diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_trash_service.py b/services/web/server/src/simcore_service_webserver/projects/projects_trash_service.py index 6548eae4567c..2270ca66e6c8 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_trash_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_trash_service.py @@ -1,9 +1,11 @@ from ._trash_service import ( + batch_delete_trashed_projects_as_admin, delete_explicitly_trashed_project, list_explicitly_trashed_projects, ) __all__: tuple[str, ...] = ( + "batch_delete_trashed_projects_as_admin", "delete_explicitly_trashed_project", "list_explicitly_trashed_projects", ) diff --git a/services/web/server/src/simcore_service_webserver/trash/_rest.py b/services/web/server/src/simcore_service_webserver/trash/_rest.py index 9cb7bd62d9cc..ac61d4c735ff 100644 --- a/services/web/server/src/simcore_service_webserver/trash/_rest.py +++ b/services/web/server/src/simcore_service_webserver/trash/_rest.py @@ -58,7 +58,7 @@ async def empty_trash(request: web.Request): product_name = get_product_name(request) fire_and_forget_task( - _service.empty_trash_safe( + _service.safe_empty_trash( request.app, product_name=product_name, user_id=user_id ), task_suffix_name="rest.empty_trash", diff --git a/services/web/server/src/simcore_service_webserver/trash/_service.py b/services/web/server/src/simcore_service_webserver/trash/_service.py index 5e3ec8ed6dd2..42a5f6f0626c 100644 --- a/services/web/server/src/simcore_service_webserver/trash/_service.py +++ b/services/web/server/src/simcore_service_webserver/trash/_service.py @@ -1,4 +1,3 @@ -import asyncio import logging from datetime import timedelta from typing import Final @@ -9,6 +8,7 @@ from models_library.users import UserID from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.logging_utils import log_context +from simcore_service_webserver.products import _api as products_service from ..folders import folders_trash_service from ..projects import projects_trash_service @@ -62,7 +62,7 @@ async def _empty_explicitly_trashed_projects( ) -async def _empty_trashed_folders( +async def _empty_explicitly_trashed_folders_and_content( app: web.Application, product_name: ProductName, user_id: UserID ): trashed_folders_ids = await folders_trash_service.list_explicitly_trashed_folders( @@ -99,27 +99,72 @@ async def _empty_trashed_folders( ) -async def empty_trash_safe( +async def safe_empty_trash( app: web.Application, *, product_name: ProductName, user_id: UserID ): await _empty_explicitly_trashed_projects(app, product_name, user_id) - await _empty_trashed_folders(app, product_name, user_id) + await _empty_explicitly_trashed_folders_and_content(app, product_name, user_id) -async def prune_trash(app: web.Application) -> list[str]: - """Deletes expired items in the trash""" +async def safe_delete_expired_trash_as_admin(app: web.Application) -> None: settings = 
get_plugin_settings(app) - - # app-wide retention = timedelta(days=settings.TRASH_RETENTION_DAYS) - expiration_dt = arrow.now().datetime - retention + delete_until = arrow.now().datetime - retention + + app_products_names = await products_service.list_products_names(app) + + for product_name in app_products_names: + + ctx = { + "delete_until": delete_until, + "retention": retention, + "product_name": product_name, + } + + with log_context( + _logger, + logging.DEBUG, + "Deleting items trashed longer than %s ago in %s [trashed before %s]", + retention, + product_name, + delete_until, + ): + try: - _logger.debug( - "CODE PLACEHOLDER: **ALL** items marked as trashed during %s days are deleted (those marked before %s)", - retention, - expiration_dt, - ) - await asyncio.sleep(5) + await folders_trash_service.batch_delete_trashed_folders_as_admin( + app, + trashed_before=delete_until, + product_name=product_name, + fail_fast=False, + ) + + except Exception as exc: # pylint: disable=broad-exception-caught + _logger.warning( + **create_troubleshotting_log_kwargs( + "Error batch deleting expired trashed folders as admin.", + error=exc, + error_context=ctx, + ) + ) - return [] + try: + + deleted_project_ids = ( + await projects_trash_service.batch_delete_trashed_projects_as_admin( + app, + trashed_before=delete_until, + fail_fast=False, + ) + ) + + _logger.info("Deleted %d trashed projects", len(deleted_project_ids)) + + except Exception as exc: # pylint: disable=broad-exception-caught + _logger.warning( + **create_troubleshotting_log_kwargs( + "Error batch deleting expired projects as admin.", + error=exc, + error_context=ctx, + ) + ) diff --git a/services/web/server/src/simcore_service_webserver/trash/trash_service.py b/services/web/server/src/simcore_service_webserver/trash/trash_service.py new file mode 100644 index 000000000000..3cd438f4e1c4 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/trash/trash_service.py @@ -0,0 +1,4 @@ +from ._service import safe_delete_expired_trash_as_admin + +__all__: tuple[str, ...]
= ("safe_delete_expired_trash_as_admin",) +# nopycln: file diff --git a/services/web/server/tests/integration/01/test_garbage_collection.py b/services/web/server/tests/integration/01/test_garbage_collection.py index 62075ff6ba09..57f23d093029 100644 --- a/services/web/server/tests/integration/01/test_garbage_collection.py +++ b/services/web/server/tests/integration/01/test_garbage_collection.py @@ -198,7 +198,7 @@ async def _fake_background_task(app: web.Application): await asyncio.sleep(0.1) return mocker.patch( - "simcore_service_webserver.garbage_collector.plugin.run_background_task", + "simcore_service_webserver.garbage_collector.plugin._tasks_core.run_background_task", side_effect=_fake_background_task, ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_repository.py b/services/web/server/tests/unit/with_dbs/02/test_projects_repository.py new file mode 100644 index 000000000000..4141f9527965 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_repository.py @@ -0,0 +1,192 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from datetime import timedelta +from uuid import UUID + +import arrow +import pytest +from aiohttp.test_utils import TestClient +from common_library.users_enums import UserRole +from pytest_simcore.helpers.webserver_login import UserInfoDict +from simcore_service_webserver.projects import ( + _projects_db as projects_service_repository, +) +from simcore_service_webserver.projects.exceptions import ProjectNotFoundError +from simcore_service_webserver.projects.models import ProjectDBGet, ProjectDict + + +@pytest.fixture +def user_role() -> UserRole: + return UserRole.USER + + +async def test_get_project( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, +): + assert client.app + + # Get valid project + got_project = await projects_service_repository.get_project( + client.app, project_uuid=user_project["uuid"] + ) + + assert got_project.uuid == UUID(user_project["uuid"]) + assert got_project.name == user_project["name"] + assert got_project.description == user_project["description"] + + # Get non-existent project + non_existent_project_uuid = UUID("00000000-0000-0000-0000-000000000000") + with pytest.raises(ProjectNotFoundError): + await projects_service_repository.get_project( + client.app, project_uuid=non_existent_project_uuid + ) + + +async def test_patch_project( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, +): + assert client.app + + # This will change after in patched_project + assert user_project["creationDate"] == user_project["lastChangeDate"] + + # Patch valid project + patch_data = {"name": "Updated Project Name"} + patched_project = await projects_service_repository.patch_project( + client.app, + project_uuid=user_project["uuid"], + new_partial_project_data=patch_data, + ) + + assert patched_project.uuid == UUID(user_project["uuid"]) + assert patched_project.name == patch_data["name"] + assert patched_project.creation_date < patched_project.last_change_date + + # Patch non-existent project + non_existent_project_uuid = UUID("00000000-0000-0000-0000-000000000000") + with pytest.raises(ProjectNotFoundError): + await projects_service_repository.patch_project( + client.app, + project_uuid=non_existent_project_uuid, + new_partial_project_data=patch_data, + ) + + +async def test_delete_project( + client: TestClient, + logged_user: UserInfoDict, + 
user_project: ProjectDict, +): + assert client.app + + # Delete valid project + deleted_project = await projects_service_repository.delete_project( + client.app, project_uuid=user_project["uuid"] + ) + + assert deleted_project.uuid == UUID(user_project["uuid"]) + + # Check deleted + with pytest.raises(ProjectNotFoundError): + await projects_service_repository.delete_project( + client.app, project_uuid=user_project["uuid"] + ) + + # Delete non-existent project + non_existent_project_uuid = UUID("00000000-0000-0000-0000-000000000000") + with pytest.raises(ProjectNotFoundError): + await projects_service_repository.delete_project( + client.app, project_uuid=non_existent_project_uuid + ) + + +@pytest.fixture +async def trashed_project( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, +) -> ProjectDBGet: + assert client.app + + # Patch project to be trashed + trashed_at = arrow.utcnow().datetime + patch_data = { + "trashed": trashed_at, + "trashed_by": logged_user["id"], + "trashed_explicitly": True, + } + return await projects_service_repository.patch_project( + client.app, + project_uuid=user_project["uuid"], + new_partial_project_data=patch_data, + ) + + +async def test_list_trashed_projects(client: TestClient, trashed_project: ProjectDBGet): + assert client.app + + ( + total_count, + trashed_projects, + ) = await projects_service_repository.list_trashed_projects( + client.app, + trashed_explicitly=True, + trashed_before=arrow.utcnow().datetime + timedelta(days=1), + ) + + assert total_count == 1 + assert len(trashed_projects) == 1 + assert trashed_projects[0] == trashed_project + + +async def test_get_trashed_by_primary_gid( + client: TestClient, + logged_user: UserInfoDict, + trashed_project: ProjectDBGet, +): + assert client.app + + # Get trashed by primary gid + trashed_by_primary_gid = ( + await projects_service_repository.get_trashed_by_primary_gid( + client.app, + projects_uuid=trashed_project.uuid, + ) + ) + + assert trashed_by_primary_gid == logged_user["primary_gid"] + + +async def test_batch_get_trashed_by_primary_gid( + client: TestClient, + logged_user: UserInfoDict, + trashed_project: ProjectDBGet, +): + assert client.app + + non_existent_project_uuid = UUID("00000000-0000-0000-0000-000000000000") + + # Batch get trashed by primary gid + trashed_by_primary_gid = ( + await projects_service_repository.batch_get_trashed_by_primary_gid( + client.app, + projects_uuids=[ + trashed_project.uuid, + non_existent_project_uuid, # non-existent + trashed_project.uuid, # repeated + ], + ) + ) + + assert trashed_by_primary_gid == [ + logged_user["primary_gid"], + None, + logged_user["primary_gid"], + ] diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py index f946c2a8f207..d62cac76a519 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py @@ -13,7 +13,6 @@ from uuid import UUID, uuid5 import aiopg.sa -import arrow import pytest import sqlalchemy as sa from aiohttp.test_utils import TestClient @@ -32,9 +31,6 @@ from simcore_postgres_database.utils_projects_nodes import ProjectNodesRepo from simcore_service_webserver.projects._db_utils import PermissionStr from simcore_service_webserver.projects._groups_db import update_or_insert_project_group -from simcore_service_webserver.projects._projects_db import ( - batch_get_trashed_by_primary_gid, -) from simcore_service_webserver.projects.api 
import has_user_project_access_rights from simcore_service_webserver.projects.db import ProjectAccessRights, ProjectDBAPI from simcore_service_webserver.projects.exceptions import ( @@ -966,42 +962,3 @@ async def test_check_project_node_has_all_required_inputs_ok( project_uuid=UUID(inserted_project["uuid"]), node_id=UUID("324d6ef2-a82c-414d-9001-dc84da1cbea3"), ) - - -@pytest.mark.parametrize( - "user_role", - [UserRole.USER], -) -async def test_batch_get_trashed_by_primary_gid( - client: TestClient, - logged_user: UserInfoDict, - insert_project_in_db: Callable[..., Awaitable[dict[str, Any]]], - fake_project: ProjectDict, -): - assert client.app - - # Insert two different projects - fake_project_1 = deepcopy(fake_project) - fake_project_1["trashed_by"] = logged_user["id"] - fake_project_1["trashed"] = arrow.now().datetime - fake_project_1["trashed_explicitly"] = True - - project_1 = await insert_project_in_db(fake_project_1, user_id=logged_user["id"]) - project_2 = await insert_project_in_db(fake_project, user_id=logged_user["id"]) - - # Test with two different project UUIDs - trashed_by_primary_gid = await batch_get_trashed_by_primary_gid( - client.app, - projects_uuids=[project_1["uuid"], project_2["uuid"]], - ) - assert trashed_by_primary_gid == [logged_user["primary_gid"], None] - - # Test with two identical project UUIDs - trashed_by_primary_gid = await batch_get_trashed_by_primary_gid( - client.app, - projects_uuids=[project_1["uuid"], project_1["uuid"]], - ) - assert trashed_by_primary_gid == [ - logged_user["primary_gid"], - logged_user["primary_gid"], - ] diff --git a/services/web/server/tests/unit/with_dbs/03/trash/conftest.py b/services/web/server/tests/unit/with_dbs/03/trash/conftest.py new file mode 100644 index 000000000000..5c742b12144d --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/03/trash/conftest.py @@ -0,0 +1,102 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +import logging +from collections.abc import AsyncIterable, Callable +from pathlib import Path + +import pytest +from aiohttp import web +from aiohttp.test_utils import TestClient +from aioresponses import aioresponses +from models_library.products import ProductName +from pytest_mock import MockerFixture +from pytest_simcore.helpers.logging_tools import log_context +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem +from pytest_simcore.helpers.webserver_projects import NewProject +from simcore_service_webserver.projects.models import ProjectDict + +_logger = logging.getLogger(__name__) + + +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, {"WEBSERVER_DEV_FEATURES_ENABLED": "1"} + ) + + +@pytest.fixture +async def other_user( + client: TestClient, logged_user: UserInfoDict +) -> AsyncIterable[UserInfoDict]: + # new user different from logged_user + async with NewUser( + { + "name": f"other_user_than_{logged_user['name']}", + "role": "USER", + }, + client.app, + ) as user: + yield user + + +@pytest.fixture +async def other_user_project( + client: TestClient, 
+
+
+@pytest.fixture
+async def other_user_project(
+    client: TestClient,
+    fake_project: ProjectDict,
+    other_user: UserInfoDict,
+    tests_data_dir: Path,
+    osparc_product_name: ProductName,
+) -> AsyncIterable[ProjectDict]:
+    async with NewProject(
+        fake_project,
+        client.app,
+        user_id=other_user["id"],
+        product_name=osparc_product_name,
+        tests_data_dir=tests_data_dir,
+    ) as project:
+        yield project
+
+
+@pytest.fixture
+def mocked_catalog(
+    user_project: ProjectDict,
+    catalog_subsystem_mock: Callable[[list[ProjectDict]], None],
+):
+    catalog_subsystem_mock([user_project])
+
+
+@pytest.fixture
+def mocked_director_v2(director_v2_service_mock: aioresponses):
+    ...
+
+
+@pytest.fixture
+def mocked_storage(storage_subsystem_mock: MockedStorageSubsystem):
+    ...
+
+
+@pytest.fixture
+def with_disabled_background_task_to_prune_trash(mocker: MockerFixture) -> None:
+    async def _empty_lifespan(app: web.Application):
+        with log_context(
+            logging.INFO, "Fake background_task_to_prune_trash event", logger=_logger
+        ):
+            yield
+
+    mocker.patch(
+        "simcore_service_webserver.garbage_collector._tasks_trash.create_background_task_to_prune_trash",
+        autospec=True,
+        return_value=_empty_lifespan,
+    )
diff --git a/services/web/server/tests/unit/with_dbs/03/test_trash.py b/services/web/server/tests/unit/with_dbs/03/trash/test_trash.py
similarity index 95%
rename from services/web/server/tests/unit/with_dbs/03/test_trash.py
rename to services/web/server/tests/unit/with_dbs/03/trash/test_trash.py
index a1ec3bd8c977..86cfb24dd24c 100644
--- a/services/web/server/tests/unit/with_dbs/03/test_trash.py
+++ b/services/web/server/tests/unit/with_dbs/03/trash/test_trash.py
@@ -7,24 +7,20 @@
 import asyncio
-from collections.abc import AsyncIterable, Callable
+from collections.abc import AsyncIterable
 from unittest.mock import MagicMock
 from uuid import UUID
 
 import arrow
 import pytest
 from aiohttp.test_utils import TestClient
-from aioresponses import aioresponses
 from models_library.api_schemas_webserver.folders_v2 import FolderGet
 from models_library.api_schemas_webserver.projects import ProjectGet, ProjectListItem
 from models_library.api_schemas_webserver.workspaces import WorkspaceGet
 from models_library.rest_pagination import Page
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.assert_checks import assert_status
-from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
-from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict
-from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from servicelib.aiohttp import status
 from simcore_service_webserver.db.models import UserRole
 from simcore_service_webserver.projects._groups_api import ProjectGroupGet
@@ -33,38 +29,11 @@
 from yarl import URL
 
 
-@pytest.fixture
-def app_environment(
-    app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch
-) -> EnvVarsDict:
-    return app_environment | setenvs_from_dict(
-        monkeypatch, {"WEBSERVER_DEV_FEATURES_ENABLED": "1"}
-    )
-
-
 @pytest.fixture
 def user_role() -> UserRole:
     return UserRole.USER
 
 
-@pytest.fixture
-def mocked_catalog(
-    user_project: ProjectDict,
-    catalog_subsystem_mock: Callable[[list[ProjectDict]], None],
-):
-    catalog_subsystem_mock([user_project])
-
-
-@pytest.fixture
-def mocked_director_v2(director_v2_service_mock: aioresponses):
-    ...
-
-
-@pytest.fixture
-def mocked_storage(storage_subsystem_mock: MockedStorageSubsystem):
-    ...
-
-
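The fixtures removed here are not lost: they moved to the new shared `trash/conftest.py` above, where every trash test can reuse them. Note that `mocked_director_v2` and `mocked_storage` have empty bodies on purpose; merely depending on `director_v2_service_mock` / `storage_subsystem_mock` installs the mocks. A sketch of how such dependency-only fixtures compose (the aggregate name `mocked_backend` is hypothetical, not part of this PR):

```python
import pytest


@pytest.fixture
def mocked_backend(  # hypothetical aggregate fixture
    mocked_catalog: None,
    mocked_director_v2: None,
    mocked_storage: None,
) -> None:
    """Depending on the three no-op fixtures is all it takes to mock the
    catalog, director-v2 and storage subsystems for a test."""
```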
 @pytest.mark.acceptance_test(
     "For https://github.com/ITISFoundation/osparc-simcore/pull/6579"
 )
@@ -84,11 +53,11 @@ async def test_trash_projects(  # noqa: PLR0915
 
     # this test should emulate NO errors stopping services
     mock_remove_dynamic_services = mocker.patch(
-        "simcore_service_webserver.projects._trash_service.projects_service.remove_project_dynamic_services",
+        "simcore_service_webserver.projects._trash_service._projects_service_delete.projects_service.remove_project_dynamic_services",
         autospec=True,
     )
     mock_stop_pipeline = mocker.patch(
-        "simcore_service_webserver.projects._trash_service.director_v2_api.stop_pipeline",
+        "simcore_service_webserver.projects._trash_service._projects_service_delete.director_v2_service.stop_pipeline",
         autospec=True,
     )
     mocker.patch(
@@ -190,21 +159,6 @@ async def test_trash_projects(  # noqa: PLR0915
     mock_remove_dynamic_services.assert_awaited()
 
 
-@pytest.fixture
-async def other_user(
-    client: TestClient, logged_user: UserInfoDict
-) -> AsyncIterable[UserInfoDict]:
-    # new user different from logged_user
-    async with NewUser(
-        {
-            "name": f"other_user_than_{logged_user['name']}",
-            "role": "USER",
-        },
-        client.app,
-    ) as user:
-        yield user
-
-
 async def test_trash_projects_shared_among_users(
     client: TestClient,
     logged_user: UserInfoDict,
@@ -384,6 +338,7 @@ async def test_trash_folder_with_content(
     resp = await client.post("/v0/folders", json={"name": "My first folder"})
     data, _ = await assert_status(resp, status.HTTP_201_CREATED)
     folder = FolderGet.model_validate(data)
+    assert folder.trashed_at is None
 
     # CREATE a SUB-folder
     resp = await client.post(
@@ -427,11 +382,12 @@ async def test_trash_folder_with_content(
     resp = await client.post(f"/v0/folders/{folder.folder_id}:trash")
     await assert_status(resp, status.HTTP_204_NO_CONTENT)
 
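At this point in the test the root folder has been trashed, and with it, implicitly, its sub-folder and the project inside. The assertions that follow check that only the root shows up when listing with the `trashed` filter, which is passed as a JSON-encoded object. A sketch of a helper that captures this call (the helper itself is an assumption for illustration, not part of the PR):

```python
import json

from aiohttp.test_utils import TestClient
from models_library.api_schemas_webserver.folders_v2 import FolderGet
from models_library.rest_pagination import Page


async def _list_trashed_folders(client: TestClient) -> Page[FolderGet]:
    # hypothetical helper: list only folders marked as trashed
    resp = await client.get(
        "/v0/folders", params={"filters": json.dumps({"trashed": True})}
    )
    assert resp.status == 200
    return Page[FolderGet].model_validate(await resp.json())
```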
-    # ONLY folder listed in trash. The rest is not listed anymore!
+    # ONLY the folder itself is listed in the trash. The rest is no longer listed since it is implicitly trashed!
     resp = await client.get("/v0/folders", params={"filters": '{"trashed": true}'})
     await assert_status(resp, status.HTTP_200_OK)
     page = Page[FolderGet].model_validate(await resp.json())
     assert page.meta.total == 1
+    assert page.data[0].trashed_at is not None
     assert page.data[0].folder_id == folder.folder_id
 
     resp = await client.get(
diff --git a/services/web/server/tests/unit/with_dbs/03/trash/test_trash_service.py b/services/web/server/tests/unit/with_dbs/03/trash/test_trash_service.py
new file mode 100644
index 000000000000..94d67e5ec567
--- /dev/null
+++ b/services/web/server/tests/unit/with_dbs/03/trash/test_trash_service.py
@@ -0,0 +1,216 @@
+# pylint: disable=protected-access
+# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-statements
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+
+
+import contextlib
+from collections.abc import AsyncIterator
+from unittest.mock import MagicMock
+
+import pytest
+from aiohttp.test_utils import TestClient
+from models_library.api_schemas_webserver.projects import ProjectGet
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
+from pytest_simcore.helpers.typing_env import EnvVarsDict
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from servicelib.aiohttp import status
+from simcore_service_webserver.db.models import UserRole
+from simcore_service_webserver.projects import _trash_service
+from simcore_service_webserver.projects.models import ProjectDict
+from simcore_service_webserver.trash import trash_service
+
+
+@pytest.fixture
+def app_environment(
+    app_environment: EnvVarsDict,
+    monkeypatch: pytest.MonkeyPatch,
+    with_disabled_background_task_to_prune_trash: None,
+) -> EnvVarsDict:
+    return app_environment | setenvs_from_dict(
+        monkeypatch,
+        {
+            "TRASH_RETENTION_DAYS": "0",
+            "WEBSERVER_GARBAGE_COLLECTOR": "null",
+        },
+    )
+
+
+@pytest.fixture
+def user_role() -> UserRole:
+    return UserRole.USER
+
+
+@contextlib.asynccontextmanager
+async def _switch_client_session_to(
+    client: TestClient, user: UserInfoDict
+) -> AsyncIterator[TestClient]:
+    assert client.app
+
+    # NOTE: may respond 4xx if the user is already logged out; deliberately not asserted
+    await client.post(f'{client.app.router["auth_logout"].url_for()}')
+
+    resp = await client.post(
+        f'{client.app.router["auth_login"].url_for()}',
+        json={
+            "email": user["email"],
+            "password": user["raw_password"],
+        },
+    )
+    await assert_status(resp, status.HTTP_200_OK)
+
+    yield client
+
+    resp = await client.post(f'{client.app.router["auth_logout"].url_for()}')
+    await assert_status(resp, status.HTTP_200_OK)
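`_switch_client_session_to` lets a single `TestClient` act as different users by logging out and back in around a block. A usage sketch, with all names taken from this file (wrapped in a function only to make the fragment self-contained):

```python
async def _example(client: TestClient, other_user: UserInfoDict) -> None:
    # run a block of requests as `other_user`; on exit the session is logged
    # out again, so the caller starts from a clean session
    async with _switch_client_session_to(client, other_user) as user_client:
        resp = await user_client.get("/v0/projects")
        await assert_status(resp, status.HTTP_200_OK)
```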
+
+
+async def test_trash_service__delete_expired_trash(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    user_project: ProjectDict,
+    other_user: UserInfoDict,
+    other_user_project: ProjectDict,
+    mocked_catalog: None,
+    mocked_director_v2: None,
+    mocked_dynamic_services_interface: dict[str, MagicMock],
+):
+    assert client.app
+    assert logged_user["id"] != other_user["id"]
+
+    # TRASH projects
+    # logged_user trashes their project
+    user_project_id = user_project["uuid"]
+    await _trash_service.trash_project(
+        client.app,
+        product_name="osparc",
+        user_id=logged_user["id"],
+        project_id=user_project_id,
+        force_stop_first=True,
+        explicit=True,
+    )
+
+    # other_user trashes their project
+    other_user_project_id = other_user_project["uuid"]
+    await _trash_service.trash_project(
+        client.app,
+        product_name="osparc",
+        user_id=other_user["id"],
+        project_id=other_user_project_id,
+        force_stop_first=True,
+        explicit=True,
+    )
+
+    resp = await client.get(f"/v0/projects/{user_project_id}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+    assert ProjectGet.model_validate(data).trashed_by == logged_user["primary_gid"]
+
+    # UNDER TEST: Run delete_expired_trash
+    await trash_service.safe_delete_expired_trash_as_admin(client.app)
+
+    # ASSERT: logged_user tries to get the project and expects 404
+    resp = await client.get(f"/v0/projects/{user_project_id}")
+    await assert_status(resp, status.HTTP_404_NOT_FOUND)
+
+    # ASSERT: other_user tries to get the project and expects 404
+    async with _switch_client_session_to(client, other_user):
+        resp = await client.get(f"/v0/projects/{other_user_project_id}")
+        await assert_status(resp, status.HTTP_404_NOT_FOUND)
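Both tests in this file lean on the `app_environment` override above: with `TRASH_RETENTION_DAYS=0`, anything trashed is already past its retention window, so `safe_delete_expired_trash_as_admin` can prune it immediately after the trash call. A sketch of the cutoff rule this setting implies (an illustration under that assumption, not the service's actual code):

```python
from datetime import datetime, timedelta, timezone


def _is_expired(trashed_at: datetime | None, retention_days: int) -> bool:
    # an item becomes prunable once its trashed timestamp is older than the
    # retention window; with retention_days=0 any trashed item qualifies
    if trashed_at is None:
        return False
    return trashed_at <= datetime.now(timezone.utc) - timedelta(days=retention_days)
```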
+
+
+async def test_trash_nested_folders_and_projects(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    user_project: ProjectDict,
+    other_user: UserInfoDict,
+    other_user_project: ProjectDict,
+    mocked_catalog: None,
+    mocked_director_v2: None,
+    mocked_dynamic_services_interface: dict[str, MagicMock],
+):
+    assert client.app
+    assert logged_user["id"] != other_user["id"]
+
+    async with _switch_client_session_to(client, logged_user):
+        # CREATE folders hierarchy for logged_user
+        resp = await client.post("/v0/folders", json={"name": "Root Folder"})
+        data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+        logged_user_root_folder = data
+
+        resp = await client.post(
+            "/v0/folders",
+            json={
+                "name": "Sub Folder",
+                "parentFolderId": logged_user_root_folder["folderId"],
+            },
+        )
+        data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+        logged_user_sub_folder = data
+
+        # MOVE project to subfolder
+        resp = await client.put(
+            f"/v0/projects/{user_project['uuid']}/folders/{logged_user_sub_folder['folderId']}"
+        )
+        await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+        # TRASH the root folder
+        resp = await client.post(
+            f"/v0/folders/{logged_user_root_folder['folderId']}:trash"
+        )
+        await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    async with _switch_client_session_to(client, other_user):
+        # CREATE folders hierarchy for other_user
+        resp = await client.post("/v0/folders", json={"name": "Root Folder"})
+        data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+        other_user_root_folder = data
+
+        resp = await client.post(
+            "/v0/folders",
+            json={
+                "name": "Sub Folder (other)",
+                "parentFolderId": other_user_root_folder["folderId"],
+            },
+        )
+        data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+        other_user_sub_folder = data
+
+        # MOVE project to subfolder
+        resp = await client.put(
+            f"/v0/projects/{other_user_project['uuid']}/folders/{other_user_sub_folder['folderId']}"
+        )
+        await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+        # TRASH the root folder
+        resp = await client.post(
+            f"/v0/folders/{other_user_root_folder['folderId']}:trash"
+        )
+        await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    # UNDER TEST
+    await trash_service.safe_delete_expired_trash_as_admin(client.app)
+
+    async with _switch_client_session_to(client, logged_user):
+        # Verify logged_user's resources are gone
+        resp = await client.get(f"/v0/folders/{logged_user_root_folder['folderId']}")
+        await assert_status(resp, status.HTTP_403_FORBIDDEN)
+
+        resp = await client.get(f"/v0/folders/{logged_user_sub_folder['folderId']}")
+        await assert_status(resp, status.HTTP_403_FORBIDDEN)
+
+        resp = await client.get(f"/v0/projects/{user_project['uuid']}")
+        await assert_status(resp, status.HTTP_404_NOT_FOUND)
+
+    # Verify other_user's resources are gone
+    async with _switch_client_session_to(client, other_user):
+        resp = await client.get(f"/v0/folders/{other_user_root_folder['folderId']}")
+        await assert_status(resp, status.HTTP_403_FORBIDDEN)
+
+        resp = await client.get(f"/v0/folders/{other_user_sub_folder['folderId']}")
+        await assert_status(resp, status.HTTP_403_FORBIDDEN)
+
+        resp = await client.get(f"/v0/projects/{other_user_project['uuid']}")
+        await assert_status(resp, status.HTTP_404_NOT_FOUND)
diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py
index d583e3c783e4..5b40152bea5a 100644
--- a/services/web/server/tests/unit/with_dbs/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/conftest.py
@@ -631,7 +631,7 @@ async def user_project(
     fake_project: ProjectDict,
     logged_user: UserInfoDict,
     tests_data_dir: Path,
-    osparc_product_name: str,
+    osparc_product_name: ProductName,
 ) -> AsyncIterator[ProjectDict]:
     async with NewProject(
         fake_project,