Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions api/specs/web-server/_trash.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,8 @@
)


@router.delete(
"/trash",
@router.post(
"/trash:empty",
status_code=status.HTTP_204_NO_CONTENT,
)
def empty_trash():
Expand Down
72 changes: 72 additions & 0 deletions packages/common-library/src/common_library/pagination_tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
from collections.abc import Iterable
from typing import Annotated

from pydantic import BaseModel, ConfigDict, Field, NonNegativeInt, PositiveInt


class PageParams(BaseModel):
    """Mutable pagination state carried across iterations of `iter_pagination_params`.

    `offset_initial` and `limit` are frozen after construction; `offset_current`
    advances by `limit` on every page and `total_number_of_items` must be set by
    the caller from the first page's response.
    """

    # First requested offset; frozen after construction.
    offset_initial: Annotated[NonNegativeInt, Field(frozen=True)] = 0
    # Running offset, advanced by the iterator after each yielded page.
    offset_current: NonNegativeInt = 0
    limit: Annotated[PositiveInt, Field(frozen=True)]
    # None until the caller assigns it from the first page's response.
    total_number_of_items: int | None = None

    model_config = ConfigDict(validate_assignment=True)

    @property
    def offset(self) -> NonNegativeInt:
        """Offset to use for the next request (alias of `offset_current`)."""
        return self.offset_current

    def has_items_left(self) -> bool:
        """True while the total is unknown or `offset_current` has not reached it."""
        return (
            self.total_number_of_items is None
            or self.offset_current < self.total_number_of_items
        )

    def total_number_of_pages(self) -> NonNegativeInt:
        """Number of pages needed to cover all items from `offset_initial` (ceiling division).

        Requires `total_number_of_items` to have been set first.
        """
        # FIX: compare against None explicitly — a total of 0 items is valid
        # (0 pages); the previous truthiness assert rejected it.
        assert self.total_number_of_items is not None  # nosec
        num_items = self.total_number_of_items - self.offset_initial
        return num_items // self.limit + (1 if num_items % self.limit else 0)


def iter_pagination_params(
    offset: NonNegativeInt = 0,
    limit: PositiveInt = 100,
    total_number_of_items: NonNegativeInt | None = None,
) -> Iterable[PageParams]:
    """Yields one mutable `PageParams` per page until all items are covered.

    The caller MUST assign `page_params.total_number_of_items` from the first
    page's response (unless `total_number_of_items` is given here), and the
    total must remain constant while iterating.

    Raises:
        RuntimeError: if the total is never set, or if it changes mid-iteration.
    """
    kwargs = {}
    # FIX: compare against None — an explicit total of 0 is valid and must stop
    # iteration immediately (previously it fell through as falsy and forced a
    # spurious first iteration).
    if total_number_of_items is not None:
        kwargs["total_number_of_items"] = total_number_of_items

    page_params = PageParams(
        offset_initial=offset, offset_current=offset, limit=limit, **kwargs
    )

    assert page_params.offset_current == page_params.offset_initial  # nosec

    total_count_before = page_params.total_number_of_items
    page_index = 0

    while page_params.has_items_left():

        yield page_params

        if page_params.total_number_of_items is None:
            msg = "Must be updated at least before the first iteration, i.e. page_args.total_number_of_items = total_count"
            raise RuntimeError(msg)

        # Guard against a collection whose size changes while being paginated.
        if (
            total_count_before is not None
            and total_count_before != page_params.total_number_of_items
        ):
            msg = (
                f"total_number_of_items cannot change on every iteration: before={total_count_before}, now={page_params.total_number_of_items}."
                "WARNING: the size of the paginated collection might be changing while it is being iterated?"
            )
            raise RuntimeError(msg)

        if page_index == 0:
            # Lock the reference total after the first page.
            total_count_before = page_params.total_number_of_items

        # FIX: page_index was never incremented, so the `page_index == 0` guard
        # above was always true and `total_count_before` got overwritten on
        # every iteration instead of being locked after the first page.
        page_index += 1

        page_params.offset_current += limit
        assert page_params.offset == page_params.offset_current  # nosec
88 changes: 88 additions & 0 deletions packages/common-library/tests/test_pagination_tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=unused-variable
# pylint: disable=too-many-arguments

import asyncio
from collections.abc import Callable

import pytest
from common_library.pagination_tools import iter_pagination_params
from pydantic import ValidationError


@pytest.fixture
def all_items() -> list[int]:
    # 11 items so that limits of 2, 3 and 5 all leave a partially-filled last page
    return [*range(11)]


@pytest.fixture
async def get_page(all_items: list[int]) -> Callable:
    # Simulates an async backend: returns one page slice plus the total count.
    async def _get_page(offset, limit) -> tuple[list[int], int]:
        await asyncio.sleep(0)
        page = all_items[offset:][:limit]
        return page, len(all_items)

    return _get_page


@pytest.mark.parametrize("limit", [2, 3, 5])
@pytest.mark.parametrize("offset", [0, 1, 5])
async def test_iter_pages_args(
    limit: int, offset: int, get_page: Callable, all_items: list[int]
):
    # Sentinel "previous page" that can never equal a real page of ints.
    last_page = [None] * limit

    remaining = len(all_items) - offset
    expected_num_pages = -(-remaining // limit)  # ceiling division

    num_pages = 0
    page_args = None
    for page_index, page_args in enumerate(iter_pagination_params(offset, limit)):
        page_items, page_args.total_number_of_items = await get_page(
            page_args.offset_current, page_args.limit
        )

        # every page must differ from the previous one
        assert set(page_items) != set(last_page)
        last_page = list(page_items)

        # page is a contiguous sub-sequence of the full collection
        assert str(page_items)[1:-1] in str(all_items)[1:-1]

        num_pages = page_index + 1

    assert last_page[-1] == all_items[-1]
    assert num_pages == expected_num_pages

    assert page_args is not None
    assert not page_args.has_items_left()
    assert page_args.total_number_of_pages() == num_pages


@pytest.mark.parametrize("limit", [-1, 0])
@pytest.mark.parametrize("offset", [-1])
def test_iter_pages_args_invalid(limit: int, offset: int):
    # constraints are validated eagerly when PageParams is constructed
    with pytest.raises(ValidationError):  # noqa: PT012
        for _page_params in iter_pagination_params(offset=offset, limit=limit):
            pass


def test_fails_if_total_number_of_items_not_set():
    with pytest.raises(  # noqa: PT012
        RuntimeError,
        match="page_args.total_number_of_items = total_count",
    ):
        # total_number_of_items is deliberately never assigned by the consumer
        for _page_params in iter_pagination_params(limit=2):
            pass


def test_fails_if_total_number_of_items_changes():
    with pytest.raises(  # noqa: PT012
        RuntimeError,
        match="total_number_of_items cannot change on every iteration",
    ):
        # simulate a collection that grows while it is being paginated
        for params in iter_pagination_params(limit=2, total_number_of_items=4):
            assert params.total_number_of_items == 4
            params.total_number_of_items += 1
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,6 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", {
appearance: "danger-button",
allowGrowY: false,
alignY: "middle",
visibility: "excluded", // Not yet implemented
});
control.addListener("execute", () => this.fireEvent("emptyTrashRequested"));
this._addAt(control, this.self().POS.EMPTY_TRASH_BUTTON);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -458,8 +458,8 @@ qx.Class.define("osparc.data.Resources", {
"trash": {
endpoints: {
delete: {
method: "DELETE",
url: statics.API + "/trash"
method: "POST",
url: statics.API + "/trash:empty"
}
}
},
Expand Down
2 changes: 1 addition & 1 deletion services/web/server/VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
0.57.0
0.58.0
2 changes: 1 addition & 1 deletion services/web/server/setup.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.57.0
current_version = 0.58.0
commit = True
message = services/webserver api version: {current_version} → {new_version}
tag = False
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ openapi: 3.1.0
info:
title: simcore-service-webserver
description: Main service with an interface (http-API & websockets) to the web front-end
version: 0.57.0
version: 0.58.0
servers:
- url: ''
description: webserver
Expand Down Expand Up @@ -5973,8 +5973,8 @@ paths:
application/json:
schema:
$ref: '#/components/schemas/Envelope_FileUploadCompleteFutureResponse_'
/v0/trash:
delete:
/v0/trash:empty:
post:
tags:
- trash
summary: Empty Trash
Expand Down
Original file line number Diff line number Diff line change
@@ -1,21 +1,29 @@
import asyncio
import datetime
import logging

import arrow
from aiohttp import web
from common_library.pagination_tools import iter_pagination_params
from models_library.basic_types import IDStr
from models_library.products import ProductName
from models_library.projects import ProjectID
from models_library.rest_ordering import OrderBy, OrderDirection
from models_library.users import UserID
from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY
from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
from servicelib.utils import fire_and_forget_task

from ..director_v2 import api as director_v2_api
from ..dynamic_scheduler import api as dynamic_scheduler_api
from . import projects_service
from . import _crud_api_read, projects_service
from ._access_rights_api import check_user_project_permission
from .exceptions import ProjectRunningConflictError
from .models import ProjectPatchInternalExtended
from .exceptions import (
ProjectNotFoundError,
ProjectNotTrashedError,
ProjectRunningConflictError,
)
from .models import ProjectDict, ProjectPatchInternalExtended

_logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -45,7 +53,7 @@ async def trash_project(
project_id: ProjectID,
force_stop_first: bool,
explicit: bool,
):
) -> None:
"""

Raises:
Expand Down Expand Up @@ -108,7 +116,7 @@ async def untrash_project(
product_name: ProductName,
user_id: UserID,
project_id: ProjectID,
):
) -> None:
# NOTE: check_user_project_permission is inside projects_api.patch_project
await projects_service.patch_project(
app,
Expand All @@ -119,3 +127,110 @@ async def untrash_project(
trashed_at=None, trashed_explicitly=False, trashed_by=None
),
)


def _can_delete(
    project: ProjectDict,
    user_id: UserID,
    until_equal_datetime: datetime.datetime | None,
) -> bool:
    """Current policy to decide whether a trashed project can be deleted.

    Only projects that were trashed explicitly by `user_id`, are not shared,
    and (when `until_equal_datetime` is given) were trashed before that
    datetime qualify. Returns False otherwise.
    """
    trashed_at = project.get("trashed")
    trashed_by = project.get("trashedBy")
    trashed_explicitly = project.get("trashedExplicitly")

    # FIX: the previous `assert ... is not None` statements crashed with
    # AssertionError on a project that was never trashed; callers (e.g.
    # delete_trashed_project) expect a plain False so they can raise a proper
    # domain error instead.
    if trashed_at is None or trashed_by is None:
        return False

    # More than one entry in accessRights means the project is shared.
    is_shared = len(project["accessRights"]) > 1

    return bool(
        trashed_at
        # NOTE(review): the parameter name says "until_equal" but `<` excludes
        # equality — confirm whether `<=` was intended.
        and (until_equal_datetime is None or trashed_at < until_equal_datetime)
        # NOTE: current policy is more restricted until
        # logic is adapted to deal with the other cases
        and trashed_by == user_id
        and not is_shared
        and trashed_explicitly
    )


async def list_trashed_projects(
    app: web.Application,
    *,
    product_name: ProductName,
    user_id: UserID,
    until_equal_datetime: datetime.datetime | None = None,
) -> list[ProjectID]:
    """
    Lists all projects that were trashed until a specific datetime (if !=None).
    """
    trashed_projects: list[ProjectID] = []

    for page_params in iter_pagination_params(limit=100):
        (
            projects,
            page_params.total_number_of_items,
        ) = await _crud_api_read.list_projects_full_depth(
            app,
            user_id=user_id,
            product_name=product_name,
            trashed=True,
            tag_ids_list=[],
            offset=page_params.offset,
            limit=page_params.limit,
            order_by=OrderBy(field=IDStr("trashed"), direction=OrderDirection.ASC),
            search_by_multi_columns=None,
            search_by_project_name=None,
        )

        # NOTE: Applying POST-FILTERING because we do not want to modify the interface of
        # _crud_api_read.list_projects_full_depth at this time.
        # This filtering couldn't be handled at the database level when `projects_repo`
        # was refactored, as defining a custom trash_filter was needed to allow more
        # flexibility in filtering options.
        #
        # FIX: accumulate across pages with `extend` — the previous `=` assignment
        # overwrote the results of all earlier pages, so only the last page's
        # projects were ever returned.
        trashed_projects.extend(
            project["uuid"]
            for project in projects
            if _can_delete(project, user_id, until_equal_datetime)
        )
    return trashed_projects


async def delete_trashed_project(
    app: web.Application,
    *,
    user_id: UserID,
    project_id: ProjectID,
    until_equal_datetime: datetime.datetime | None = None,
) -> None:
    """Deletes a project that was explicitly trashed by the user.

    When `until_equal_datetime` is provided, only projects trashed from that
    datetime qualify; otherwise all explicitly-trashed projects do.

    Raises:
        ProjectNotFoundError: If the project is not found.
        ProjectNotTrashedError: If the project was not trashed explicitly by
            the user from the specified datetime.
    """
    project_dict = await projects_service.get_project_for_user(
        app, project_uuid=f"{project_id}", user_id=user_id
    )

    if not project_dict:
        raise ProjectNotFoundError(project_uuid=project_id, user_id=user_id)

    # safety check: only delete what the current trash policy allows
    if not _can_delete(project_dict, user_id, until_equal_datetime):
        raise ProjectNotTrashedError(
            project_uuid=project_id,
            user_id=user_id,
            reason="Cannot delete trashed project since it does not fit current criteria",
        )

    await projects_service.delete_project_by_user(
        app, user_id=user_id, project_uuid=project_id
    )
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,12 @@ class ProjectRunningConflictError(ProjectTrashError):
)


class ProjectNotTrashedError(ProjectTrashError):
    """Raised when a deletion is attempted on a project that was not trashed first."""

    msg_template = "Cannot delete project {project_uuid} since it was not trashed first: {reason}"


class NodeNotFoundError(BaseProjectError):
msg_template = "Node '{node_uuid}' not found in project '{project_uuid}'"

Expand Down
Loading
Loading