From 8ae2c705f237522f3639b97e75e5100c5cce22d8 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 09:41:14 +0200 Subject: [PATCH 01/35] Adds project document versioning and live update notifications Introduces a project document model with versioning, stored in Redis, and emits real-time update notifications to project rooms via socket.io when a project is patched. Updates Redis and docker-compose configs to support a dedicated documents database. Improves collaboration features by allowing clients to receive live project document changes. Relates to collaborative editing and project state synchronization features. --- README_pycrdt_test.md | 0 .../api_schemas_webserver/projects.py | 19 ++++ .../api_schemas_webserver/projects_ui.py | 5 +- .../api_schemas_webserver/socketio.py | 6 ++ .../src/settings_library/redis.py | 1 + services/docker-compose-ops.yml | 5 +- services/docker-compose.yml | 2 +- .../_controller/projects_states_rest.py | 17 ++++ .../projects/_jobs_repository.py | 2 +- .../projects/_projects_repository.py | 19 +++- .../projects/_projects_service.py | 69 ++++++++++++- .../projects/_socketio.py | 98 +++++++++++++++++++ .../projects/models.py | 5 + .../src/simcore_service_webserver/rabbitmq.py | 4 +- .../src/simcore_service_webserver/redis.py | 4 + .../socketio/_handlers.py | 7 +- .../socketio/messages.py | 17 ++++ .../unit/with_dbs/docker-compose-devel.yml | 5 +- 18 files changed, 269 insertions(+), 16 deletions(-) create mode 100644 README_pycrdt_test.md create mode 100644 services/web/server/src/simcore_service_webserver/projects/_socketio.py diff --git a/README_pycrdt_test.md b/README_pycrdt_test.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects.py b/packages/models-library/src/models_library/api_schemas_webserver/projects.py index 2b15e052944..8983659f14e 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects.py @@ -267,6 +267,25 @@ def to_domain_model(self) -> dict[str, Any]: return self.model_dump(exclude_unset=True, by_alias=False) +class ProjectDocument(OutputSchema): + uuid: ProjectID + workspace_id: WorkspaceID | None + name: str + description: str + thumbnail: HttpUrl | None + last_change_date: datetime + classifiers: list[ClassifierID] + dev: dict | None + quality: dict[str, Any] + workbench: NodesDict + ui: StudyUI | None + type: ProjectType + template_type: ProjectTemplateType | None + + # config + model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True) + + __all__: tuple[str, ...] 
= ( "EmptyModel", "ProjectCopyOverride", diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py index 2ecf809870a..e912a621acc 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py @@ -91,13 +91,14 @@ def _update_json_schema_extra(schema: JsonDict) -> None: class StudyUI(OutputSchema): # Model fully controlled by the UI and stored under `projects.ui` - icon: HttpUrl | None = None + icon: HttpUrl | None = None # <-- Deprecated workbench: dict[NodeIDStr, WorkbenchUI] | None = None slideshow: dict[NodeIDStr, SlideshowUI] | None = None current_node_id: NodeID | None = None annotations: dict[NodeIDStr, AnnotationUI] | None = None - template_type: Literal["hypertool"] | None = None + template_type: Literal["hypertool"] | None = None # <-- Deprecated + mode: Literal["workbench", "app", "guided", "standalone", "pipeline"] | None = None _empty_is_none = field_validator("*", mode="before")( empty_str_to_none_pre_validator diff --git a/packages/models-library/src/models_library/api_schemas_webserver/socketio.py b/packages/models-library/src/models_library/api_schemas_webserver/socketio.py index 6e3f987198a..d5eb5456bd7 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/socketio.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/socketio.py @@ -1,3 +1,5 @@ +from models_library.projects import ProjectID + from ..basic_types import IDStr from ..groups import GroupID from ..users import UserID @@ -15,3 +17,7 @@ def from_group_id(cls, group_id: GroupID) -> "SocketIORoomStr": @classmethod def from_user_id(cls, user_id: UserID) -> "SocketIORoomStr": return cls(f"user:{user_id}") + + @classmethod + def from_project_id(cls, project_id: ProjectID) -> "SocketIORoomStr": + return cls(f"project:{project_id}") diff --git a/packages/settings-library/src/settings_library/redis.py b/packages/settings-library/src/settings_library/redis.py index 40dd88aabf9..63e64fce449 100644 --- a/packages/settings-library/src/settings_library/redis.py +++ b/packages/settings-library/src/settings_library/redis.py @@ -19,6 +19,7 @@ class RedisDatabase(IntEnum): DEFERRED_TASKS = 7 DYNAMIC_SERVICES = 8 CELERY_TASKS = 9 + DOCUMENTS = 10 class RedisSettings(BaseCustomSettings): diff --git a/services/docker-compose-ops.yml b/services/docker-compose-ops.yml index 3db7af6aed0..dd15dd1ecbd 100644 --- a/services/docker-compose-ops.yml +++ b/services/docker-compose-ops.yml @@ -94,8 +94,9 @@ services: announcements:${REDIS_HOST}:${REDIS_PORT}:5:${REDIS_PASSWORD}, distributed_identifiers:${REDIS_HOST}:${REDIS_PORT}:6:${REDIS_PASSWORD}, deferred_tasks:${REDIS_HOST}:${REDIS_PORT}:7:${REDIS_PASSWORD}, - dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD} - celery_tasks:${REDIS_HOST}:${REDIS_PORT}:9:${REDIS_PASSWORD} + dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD}, + celery_tasks:${REDIS_HOST}:${REDIS_PORT}:9:${REDIS_PASSWORD}, + documents:${REDIS_HOST}:${REDIS_PORT}:10:${REDIS_PASSWORD} # If you add/remove a db, do not forget to update the --databases entry in the docker-compose.yml ports: - "18081:8081" diff --git a/services/docker-compose.yml b/services/docker-compose.yml index ecb72a93469..d2f19fcdd06 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -1379,7 +1379,7 @@ services: # also aof (append 
only) is also enabled such that we get full durability at the expense # of backup size. The backup is written into /data. # https://redis.io/topics/persistence - [ "redis-server", "--save", "60 1", "--loglevel", "verbose", "--databases", "10", "--appendonly", "yes", "--requirepass", "${REDIS_PASSWORD}" ] + [ "redis-server", "--save", "60 1", "--loglevel", "verbose", "--databases", "11", "--appendonly", "yes", "--requirepass", "${REDIS_PASSWORD}" ] networks: - default - autoscaling_subnet diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py index 627dce27f0f..564e34516f9 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py @@ -4,6 +4,7 @@ from aiohttp import web from models_library.api_schemas_webserver.projects import ProjectGet +from models_library.api_schemas_webserver.socketio import SocketIORoomStr from models_library.projects_state import ProjectState from pydantic import BaseModel from servicelib.aiohttp import status @@ -18,6 +19,8 @@ ) from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.webserver_models import ProjectType +from simcore_service_webserver.resource_manager.user_sessions import managed_resource +from simcore_service_webserver.socketio.server import get_socket_server from ..._meta import API_VTAG as VTAG from ...director_v2.exceptions import DirectorV2ServiceError @@ -103,6 +106,20 @@ async def open_project(request: web.Request) -> web.Response: ): raise HTTPLockedError(text="Project is locked, try later") + # Connect the socket_id to a project room + with managed_resource( + req_ctx.user_id, client_session_id, request.app + ) as resource_registry: + _socket_id = await resource_registry.get_socket_id() + if _socket_id is None: + raise web.HTTPBadRequest( + text="Cannot open project without a socket_id, please refresh the page" + ) + sio = get_socket_server(request.app) + await sio.enter_room( + _socket_id, SocketIORoomStr.from_project_id(path_params.project_id) + ) + # the project can be opened, let's update its product links await _projects_service.update_project_linked_product( request.app, path_params.project_id, req_ctx.product_name diff --git a/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py b/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py index 3b060be9a23..dca8d1c91cc 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py @@ -190,7 +190,7 @@ async def list_projects_marked_as_jobs( .offset(pagination_offset) ) - # Step 5: Execute queries + # Step 7: Execute queries async with pass_or_acquire_connection(self.engine, connection) as conn: total_count = await conn.scalar(total_query) assert isinstance(total_count, int) # nosec diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py index 705e4970ee7..556c1b59450 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py @@ -25,7 +25,7 @@ from 
..db.plugin import get_asyncpg_engine from .exceptions import ProjectNotFoundError -from .models import ProjectDBGet +from .models import ProjectDBGet, ProjectWithWorkbenchDBGet _logger = logging.getLogger(__name__) @@ -115,6 +115,23 @@ async def get_project( return ProjectDBGet.model_validate(row) +async def get_project_with_workbench( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_uuid: ProjectID, +) -> ProjectWithWorkbenchDBGet: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + query = sql.select(*PROJECT_DB_COLS, projects.c.workbench).where( + projects.c.uuid == f"{project_uuid}" + ) + result = await conn.execute(query) + row = result.one_or_none() + if row is None: + raise ProjectNotFoundError(project_uuid=project_uuid) + return ProjectWithWorkbenchDBGet.model_validate(row) + + async def batch_get_project_name( app: web.Application, connection: AsyncConnection | None = None, diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 6152340916d..ee843c572a2 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -29,12 +29,18 @@ DynamicServiceStart, DynamicServiceStop, ) -from models_library.api_schemas_webserver.projects import ProjectPatch +from models_library.api_schemas_webserver.projects import ProjectDocument, ProjectPatch from models_library.basic_types import KeyIDStr from models_library.errors import ErrorDict from models_library.groups import GroupID from models_library.products import ProductName -from models_library.projects import Project, ProjectID, ProjectIDStr +from models_library.projects import ( + Project, + ProjectID, + ProjectIDStr, + ProjectTemplateType, +) +from models_library.projects import ProjectType as ProjectTypeAPI from models_library.projects_access import Owner from models_library.projects_nodes import Node, NodeState, PartialNode from models_library.projects_nodes_io import NodeID, NodeIDStr, PortLink @@ -98,7 +104,10 @@ from ..dynamic_scheduler import api as dynamic_scheduler_service from ..products import products_web from ..rabbitmq import get_rabbitmq_rpc_client -from ..redis import get_redis_lock_manager_client_sdk +from ..redis import ( + get_redis_document_manager_client_sdk, + get_redis_lock_manager_client_sdk, +) from ..resource_manager.user_sessions import ( PROJECT_ID_KEY, UserSessionID, @@ -136,6 +145,7 @@ from ._nodes_utils import set_reservation_same_as_limit, validate_new_service_resources from ._projects_repository_legacy import APP_PROJECT_DBAPI, ProjectDBAPI from ._projects_repository_legacy_utils import PermissionStr +from ._socketio import notify_project_document_updated from .exceptions import ( ClustersKeeperNotAvailableError, DefaultPricingUnitNotFoundError, @@ -160,6 +170,31 @@ log = logging.getLogger(__name__) PROJECT_REDIS_LOCK_KEY: str = "project:{}" +PROJECT_DOCUMENT_VERSION_KEY: str = "projects:{}:version" + + +async def _get_and_increment_project_document_version( + app: web.Application, project_uuid: ProjectID +) -> int: + """ + Atomically gets and increments the project document version using Redis. + + This function ensures thread-safe version incrementing by using Redis INCR command + which is atomic. The version starts at 1 for the first call. 
+ + Args: + app: The web application instance + project_uuid: The project UUID + + Returns: + The new incremented version number + """ + redis_client_sdk = get_redis_document_manager_client_sdk(app) + version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid) + + # Redis INCR is atomic and returns the new value + # If key doesn't exist, it's created with value 0 and then incremented to 1 + return await redis_client_sdk.redis.incr(version_key) def _is_node_dynamic(node_key: str) -> bool: @@ -355,6 +390,34 @@ async def patch_project( project_uuid=project_uuid, new_partial_project_data=patch_project_data, ) + # 6. Notify users involved in the project + project_with_workbench = await _projects_repository.get_project_with_workbench( + app=app, project_uuid=project_uuid + ) + project_document = ProjectDocument( + uuid=project_with_workbench.uuid, + workspace_id=project_with_workbench.workspace_id, + name=project_with_workbench.name, + description=project_with_workbench.description, + thumbnail=project_with_workbench.thumbnail, + last_change_date=project_with_workbench.last_change_date, + classifiers=project_with_workbench.classifiers, + dev=project_with_workbench.dev, + quality=project_with_workbench.quality, + workbench=project_with_workbench.workbench, + ui=project_with_workbench.ui, + type=ProjectTypeAPI(project_with_workbench.type), + template_type=ProjectTemplateType(project_with_workbench.template_type), + ) + document_version = await _get_and_increment_project_document_version( + app=app, project_uuid=project_uuid + ) + await notify_project_document_updated( + app=app, + project_id=project_uuid, + version=document_version, + document=project_document, + ) # diff --git a/services/web/server/src/simcore_service_webserver/projects/_socketio.py b/services/web/server/src/simcore_service_webserver/projects/_socketio.py new file mode 100644 index 00000000000..6352354237b --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_socketio.py @@ -0,0 +1,98 @@ +from typing import Final + +from aiohttp import web +from models_library.api_schemas_webserver.projects import ProjectDocument +from models_library.projects import ProjectID +from models_library.socketio import SocketMessageDict +from pydantic import AliasGenerator, BaseModel, ConfigDict +from pydantic.alias_generators import to_camel + +from ..socketio.messages import send_message_to_project_room + +# SOCKET_IO_PROJECT_CREATED_EVENT: Final[str] = "projectDocument:created" +# SOCKET_IO_PROJECT_DELETED_EVENT: Final[str] = "projectDocument:deleted" +SOCKET_IO_PROJECT_DOCUMENT_UPDATED_EVENT: Final[str] = "projectDocument:updated" + + +class BaseEvent(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + from_attributes=True, + alias_generator=AliasGenerator( + serialization_alias=to_camel, + ), + ) + + +class ProjectDocumentEvent(BaseEvent): + project_id: ProjectID + version: int + document: ProjectDocument + + +# async def notify_project_created( +# app: web.Application, +# *, +# project_id: ProjectID, +# product_name: ProductName, +# user_group_id: GroupID, +# project_name: str, +# created: datetime.datetime, +# modified: datetime.datetime, +# ) -> None: +# notification_message = SocketMessageDict( +# event_type=SOCKET_IO_PROJECT_CREATED_EVENT, +# data={ +# **ProjectCreatedOrUpdatedEvent( +# product_name=product_name, +# project_id=project_id, +# user_group_id=user_group_id, +# name=project_name, +# created=created, +# modified=modified, +# ).model_dump(mode="json", by_alias=True), +# }, +# ) + +# await 
send_message_to_project_room(app, project_id, notification_message) + + +async def notify_project_document_updated( + app: web.Application, + *, + project_id: ProjectID, + version: int, + document: ProjectDocument, +) -> None: + notification_message = SocketMessageDict( + event_type=SOCKET_IO_PROJECT_DOCUMENT_UPDATED_EVENT, + data={ + **ProjectDocumentEvent( + project_id=project_id, + version=version, + document=document, + ).model_dump(mode="json", by_alias=True), + }, + ) + await send_message_to_project_room(app, project_id, notification_message) + + +# async def notify_project_deleted( +# app: web.Application, +# *, +# project_id: ProjectID, +# product_name: ProductName, +# user_group_id: GroupID, +# ) -> None: +# notification_message = SocketMessageDict( +# event_type=SOCKET_IO_PROJECT_DELETED_EVENT, +# data={ +# **ProjectDeletedEvent( +# product_name=product_name, +# project_id=project_id, +# user_group_id=user_group_id, +# ).model_dump(mode="json", by_alias=True), +# }, +# ) + +# await send_message_to_project_room(app, project_id, notification_message) diff --git a/services/web/server/src/simcore_service_webserver/projects/models.py b/services/web/server/src/simcore_service_webserver/projects/models.py index e754bde1cf2..23225da3d76 100644 --- a/services/web/server/src/simcore_service_webserver/projects/models.py +++ b/services/web/server/src/simcore_service_webserver/projects/models.py @@ -72,6 +72,11 @@ class ProjectDBGet(BaseModel): ) +class ProjectWithWorkbenchDBGet(ProjectDBGet): + # This model is used to read the project with its workbench + workbench: NodesDict + + class ProjectJobDBGet(ProjectDBGet): workbench: NodesDict diff --git a/services/web/server/src/simcore_service_webserver/rabbitmq.py b/services/web/server/src/simcore_service_webserver/rabbitmq.py index c415c53057f..95852012541 100644 --- a/services/web/server/src/simcore_service_webserver/rabbitmq.py +++ b/services/web/server/src/simcore_service_webserver/rabbitmq.py @@ -47,7 +47,9 @@ async def _rabbitmq_client_cleanup_ctx(app: web.Application) -> AsyncIterator[No # injects healthcheck healthcheck: HealthCheck = app[HealthCheck.__name__] - healthcheck.on_healthcheck.append(_on_healthcheck_async_adapter) + healthcheck.on_healthcheck.append( + _on_healthcheck_async_adapter + ) # <-- MD: check here! 
yield diff --git a/services/web/server/src/simcore_service_webserver/redis.py b/services/web/server/src/simcore_service_webserver/redis.py index cd66a4e004d..bb48a987a9c 100644 --- a/services/web/server/src/simcore_service_webserver/redis.py +++ b/services/web/server/src/simcore_service_webserver/redis.py @@ -81,6 +81,10 @@ def get_redis_lock_manager_client_sdk(app: web.Application) -> RedisClientSDK: return _get_redis_client_sdk(app, RedisDatabase.LOCKS) +def get_redis_document_manager_client_sdk(app: web.Application) -> RedisClientSDK: + return _get_redis_client_sdk(app, RedisDatabase.DOCUMENTS) + + def get_redis_validation_code_client(app: web.Application) -> aioredis.Redis: redis_client: aioredis.Redis = _get_redis_client_sdk( app, RedisDatabase.VALIDATION_CODES diff --git a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py index ecf576dfc48..235e13338d7 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py @@ -92,10 +92,9 @@ async def _set_user_in_group_rooms( sio = get_socket_server(app) for gid in group_ids: - # NOTE socketio need to be upgraded that's why enter_room is not an awaitable - sio.enter_room(socket_id, SocketIORoomStr.from_group_id(gid)) + await sio.enter_room(socket_id, SocketIORoomStr.from_group_id(gid)) - sio.enter_room(socket_id, SocketIORoomStr.from_user_id(user_id)) + await sio.enter_room(socket_id, SocketIORoomStr.from_user_id(user_id)) # @@ -104,7 +103,7 @@ async def _set_user_in_group_rooms( @register_socketio_handler -async def connect( +async def connect( # <- MD: here the frontend connects to the socket.io server socket_id: SocketID, environ: EnvironDict, app: web.Application ) -> bool: """socketio reserved handler for when the fontend connects through socket.io diff --git a/services/web/server/src/simcore_service_webserver/socketio/messages.py b/services/web/server/src/simcore_service_webserver/socketio/messages.py index 2b20fc4b578..c4680da0599 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/messages.py +++ b/services/web/server/src/simcore_service_webserver/socketio/messages.py @@ -8,6 +8,7 @@ from aiohttp.web import Application from models_library.api_schemas_webserver.socketio import SocketIORoomStr from models_library.groups import GroupID +from models_library.projects import ProjectID from models_library.socketio import SocketMessageDict from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder @@ -29,6 +30,7 @@ SOCKET_IO_NODE_UPDATED_EVENT: Final[str] = "nodeUpdated" SOCKET_IO_PROJECT_UPDATED_EVENT: Final[str] = "projectStateUpdated" +# SOCKET_IO_PROJECT_STORE_UPDATED_EVENT: Final[str] = "projectStoreUpdated" SOCKET_IO_WALLET_OSPARC_CREDITS_UPDATED_EVENT: Final[str] = "walletOsparcCreditsUpdated" @@ -100,3 +102,18 @@ async def send_message_to_standard_group( # that might be connected to different replicas ignore_queue=False, ) + + +async def send_message_to_project_room( + app: Application, + project_id: ProjectID, + message: SocketMessageDict, +) -> None: + sio: AsyncServer = get_socket_server(app) + + await _safe_emit( + sio, + room=SocketIORoomStr.from_project_id(project_id), + message=message, + ignore_queue=False, + ) diff --git a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml index 
16f5037efc2..faae7d7c6cd 100644 --- a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml +++ b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml @@ -83,7 +83,10 @@ services: user_notifications:redis:6379:4:${TEST_REDIS_PASSWORD}, announcements:redis:6379:5:${TEST_REDIS_PASSWORD}, distributed_identifiers:redis:6379:6:${TEST_REDIS_PASSWORD}, - deferred_tasks:redis:6379:7:${TEST_REDIS_PASSWORD} + deferred_tasks:redis:6379:7:${TEST_REDIS_PASSWORD}, + dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${TEST_REDIS_PASSWORD}, + celery_tasks:${REDIS_HOST}:${REDIS_PORT}:9:${TEST_REDIS_PASSWORD}, + documents:redis:6379:10:${TEST_REDIS_PASSWORD} ports: - "18081:8081" From 04af57e670216273121d2f7935dc5893a36eeeb2 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 10:32:37 +0200 Subject: [PATCH 02/35] Removes await from non-async socket.io enter_room calls Updates socket.io room joining to reflect non-awaitable enter_room method, preparing for upcoming dependency upgrade. Also adds DOCUMENTS to Redis database setup and improves type casting for project types. Improves code clarity and ensures future compatibility. --- .../projects/_controller/projects_states_rest.py | 2 +- .../simcore_service_webserver/projects/_projects_service.py | 4 ++-- services/web/server/src/simcore_service_webserver/redis.py | 1 + .../src/simcore_service_webserver/socketio/_handlers.py | 5 +++-- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py index 564e34516f9..7a560d45dd7 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py @@ -116,7 +116,7 @@ async def open_project(request: web.Request) -> web.Response: text="Cannot open project without a socket_id, please refresh the page" ) sio = get_socket_server(request.app) - await sio.enter_room( + sio.enter_room( _socket_id, SocketIORoomStr.from_project_id(path_params.project_id) ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index ee843c572a2..8851a1c9c35 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -406,8 +406,8 @@ async def patch_project( quality=project_with_workbench.quality, workbench=project_with_workbench.workbench, ui=project_with_workbench.ui, - type=ProjectTypeAPI(project_with_workbench.type), - template_type=ProjectTemplateType(project_with_workbench.template_type), + type=cast(ProjectTypeAPI, project_with_workbench.type), + template_type=cast(ProjectTemplateType, project_with_workbench.template_type), ) document_version = await _get_and_increment_project_document_version( app=app, project_uuid=project_uuid diff --git a/services/web/server/src/simcore_service_webserver/redis.py b/services/web/server/src/simcore_service_webserver/redis.py index bb48a987a9c..e9e50e1d36e 100644 --- a/services/web/server/src/simcore_service_webserver/redis.py +++ b/services/web/server/src/simcore_service_webserver/redis.py @@ -42,6 +42,7 @@ async def setup_redis_client(app: web.Application): RedisDatabase.SCHEDULED_MAINTENANCE, 
RedisDatabase.USER_NOTIFICATIONS, RedisDatabase.ANNOUNCEMENTS, + RedisDatabase.DOCUMENTS, ) }, settings=redis_settings, diff --git a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py index 235e13338d7..c1312228420 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py @@ -92,9 +92,10 @@ async def _set_user_in_group_rooms( sio = get_socket_server(app) for gid in group_ids: - await sio.enter_room(socket_id, SocketIORoomStr.from_group_id(gid)) + # NOTE socketio need to be upgraded that's why enter_room is not an awaitable + sio.enter_room(socket_id, SocketIORoomStr.from_group_id(gid)) - await sio.enter_room(socket_id, SocketIORoomStr.from_user_id(user_id)) + sio.enter_room(socket_id, SocketIORoomStr.from_user_id(user_id)) # From 58519f3abc79c4cf4590fdb487f9b044dc6431e7 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 14:23:12 +0200 Subject: [PATCH 03/35] Refactors project patching to centralize versioned updates Introduces a unified function for patching projects with versioning and notification, ensuring atomic updates and consistent user alerts. Removes legacy methods, simplifies project API, and updates usages across modules for better maintainability and reduced duplication. --- .../folders/_workspaces_repository.py | 10 +- .../projects/_controller/projects_rest.py | 2 +- .../projects/_crud_api_create.py | 22 +-- .../projects/_crud_api_delete.py | 7 +- .../projects/_projects_repository_legacy.py | 131 +----------------- .../projects/_projects_service.py | 127 ++++++++++++----- .../projects/_projects_service_delete.py | 1 + .../projects/_socketio.py | 3 + .../projects/_trash_service.py | 4 +- .../projects/_workspaces_service.py | 10 +- .../simcore_service_webserver/projects/api.py | 2 + .../tests/unit/with_dbs/03/test_project_db.py | 85 ------------ 12 files changed, 132 insertions(+), 272 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py b/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py index 3b8951deab7..8fdb5e700a6 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py +++ b/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py @@ -10,8 +10,8 @@ from ..db.plugin import get_asyncpg_engine from ..projects import _folders_repository as projects_folders_repository from ..projects import _groups_repository as projects_groups_repository -from ..projects import _projects_repository as _projects_repository from ..projects._access_rights_service import check_user_project_permission +from ..projects.api import patch_project_and_notify_users from ..users import users_service from ..workspaces.api import check_user_workspace_access from . import _folders_repository @@ -75,14 +75,15 @@ async def move_folder_into_workspace( # ⬆️ Here we have already guaranties that user has all the right permissions to do this operation ⬆️ + user: dict = await users_service.get_user(app, user_id) async with transaction_context(get_asyncpg_engine(app)) as conn: # 4. 
Update workspace ID on the project resource for project_id in project_ids: - await _projects_repository.patch_project( + await patch_project_and_notify_users( app=app, - connection=conn, project_uuid=project_id, - new_partial_project_data={"workspace_id": workspace_id}, + patch_project_data={"workspace_id": workspace_id}, + user_primary_gid=user["primary_gid"], ) # 5. BATCH update of folders with workspace_id @@ -122,7 +123,6 @@ async def move_folder_into_workspace( ) # 9. Remove all project permissions, leave only the user who moved the project - user = await users_service.get_user(app, user_id=user_id) for project_id in project_ids: await projects_groups_repository.delete_all_project_groups( app, connection=conn, project_id=project_id diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py index 5edaa322b8b..1d97c3920ab 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py @@ -314,7 +314,7 @@ async def patch_project(request: web.Request): path_params = parse_request_path_parameters_as(ProjectPathParams, request) project_patch = await parse_request_body_as(ProjectPatch, request) - await _projects_service.patch_project( + await _projects_service.patch_project_for_user( request.app, user_id=req_ctx.user_id, project_uuid=path_params.project_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py index 506312ef10b..6aca8fa5881 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py @@ -41,7 +41,7 @@ from ..users import users_service from ..workspaces.api import check_user_workspace_access, get_user_workspace from ..workspaces.errors import WorkspaceAccessForbiddenError -from . import _folders_repository, _projects_service +from . 
import _folders_repository, _projects_repository, _projects_service from ._metadata_service import set_project_ancestors from ._permalink_service import update_or_pop_permalink_in_project from ._projects_repository_legacy import ProjectDBAPI @@ -162,10 +162,10 @@ async def _copy_files_from_source_project( product_name: str, task_progress: TaskProgress, ): - _projects_repository = ProjectDBAPI.get_from_app_context(app) + _projects_repository_legacy = ProjectDBAPI.get_from_app_context(app) needs_lock_source_project: bool = ( - await _projects_repository.get_project_type( + await _projects_repository_legacy.get_project_type( TypeAdapter(ProjectID).validate_python(source_project["uuid"]) ) != ProjectTypeDB.TEMPLATE @@ -294,7 +294,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche f"{from_study=}", ) - _projects_repository = ProjectDBAPI.get_from_app_context(request.app) + _projects_repository_legacy = ProjectDBAPI.get_from_app_context(request.app) new_project: ProjectDict = {} copy_file_coro = None @@ -372,7 +372,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche ) # 3.1 save new project in DB - new_project = await _projects_repository.insert_project( + new_project = await _projects_repository_legacy.insert_project( project=jsonable_encoder(new_project), user_id=user_id, product_name=product_name, @@ -408,8 +408,10 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche # 5. unhide the project if needed since it is now complete if not new_project_was_hidden_before_data_was_copied: - await _projects_repository.set_hidden_flag( - new_project["uuid"], hidden=False + await _projects_repository.patch_project( + request.app, + project_uuid=new_project["uuid"], + new_partial_project_data={"hidden": False}, ) # update the network information in director-v2 @@ -427,7 +429,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche product_api_base_url, ) # get the latest state of the project (lastChangeDate for instance) - new_project, _ = await _projects_repository.get_project_dict_and_type( + new_project, _ = await _projects_repository_legacy.get_project_dict_and_type( project_uuid=new_project["uuid"] ) # Appends state @@ -444,7 +446,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche # Adds folderId user_specific_project_data_db = ( - await _projects_repository.get_user_specific_project_data_db( + await _projects_repository_legacy.get_user_specific_project_data_db( project_uuid=new_project["uuid"], private_workspace_user_id_or_none=( user_id if workspace_id is None else None @@ -467,7 +469,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche for gid, access in workspace.access_rights.items() } - _project_product_name = await _projects_repository.get_project_product( + _project_product_name = await _projects_repository_legacy.get_project_product( project_uuid=new_project["uuid"] ) assert ( diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py index 5309c67cb49..83c6305f310 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py @@ -17,6 +17,7 @@ from ..storage.api import delete_data_folders_of_project from ..users.exceptions import UserNotFoundError from ..users.users_service import 
FullNameDict +from . import _projects_repository from ._access_rights_service import check_user_project_permission from ._projects_repository_legacy import ProjectDBAPI from .exceptions import ( @@ -70,7 +71,11 @@ async def mark_project_as_deleted( # NOTE: if any of the steps below fail, it might results in a # services/projects/data that might be incosistent. The GC should # be able to detect that and resolve it. - await db.set_hidden_flag(project_uuid, hidden=True) + await _projects_repository.patch_project( + app, + project_uuid=project_uuid, + new_partial_project_data={"hidden": True}, + ) async def delete_project( diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py index f9a2340db76..b9d8a774818 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py @@ -21,7 +21,6 @@ from models_library.products import ProductName from models_library.projects import ( ProjectID, - ProjectIDStr, ProjectListAtDB, ) from models_library.projects_comments import CommentID, ProjectsCommentsDB @@ -93,7 +92,6 @@ convert_to_schema_names, create_project_access_rights, patch_workbench, - update_workbench, ) from .exceptions import ( ProjectDeleteError, @@ -809,86 +807,6 @@ async def get_pure_project_access_rights_without_workspace( ) return UserProjectAccessRightsDB.model_validate(row) - async def replace_project( - self, - new_project_data: ProjectDict, - user_id: UserID, - *, - product_name: str, - project_uuid: str, - ) -> ProjectDict: - """ - replaces a project from a user - this method completely replaces a user project with new_project_data only keeping - the old entries from the project workbench if they exists in the new project workbench. - NOTE: This method does not allow to add or remove nodes. use add_project_node - or remove_project_node to achieve this. 
- - Raises: - ProjectInvalidRightsError - ProjectInvalidUsageError in case nodes are added/removed, use add_project_node/remove_project_node - """ - - async with AsyncExitStack() as stack: - stack.enter_context( - log_context( - _logger, - logging.DEBUG, - msg=f"Replace {project_uuid=} for {user_id=}", - extra=get_log_record_extra(user_id=user_id), - ) - ) - db_connection = await stack.enter_async_context(self.engine.acquire()) - await stack.enter_async_context(db_connection.begin()) - - current_project: dict = await self._get_project( - db_connection, - project_uuid, - exclude_foreign=["tags"], - for_update=True, - ) - - # uuid can ONLY be set upon creation - if current_project["uuid"] != new_project_data["uuid"]: - raise ProjectInvalidRightsError( - user_id=user_id, project_uuid=new_project_data["uuid"] - ) - # ensure the prj owner is always in the access rights - owner_primary_gid = await self._get_user_primary_group_gid( - db_connection, current_project[projects.c.prj_owner.key] - ) - new_project_data.setdefault("accessRights", {}).update( - create_project_access_rights( - owner_primary_gid, ProjectAccessRights.OWNER - ) - ) - new_project_data = update_workbench(current_project, new_project_data) - # update timestamps - new_project_data["lastChangeDate"] = now_str() - - # now update it - result = await db_connection.execute( - # pylint: disable=no-value-for-parameter - projects.update() - .values(**convert_to_db_names(new_project_data)) - .where(projects.c.id == current_project[projects.c.id.key]) - .returning(literal_column("*")) - ) - project = await result.fetchone() - assert project # nosec - await self.upsert_project_linked_product( - ProjectID(project_uuid), product_name, conn=db_connection - ) - - user_email = await self._get_user_email(db_connection, project.prj_owner) - - tags = await self._get_tags_by_project( - db_connection, project_id=project[projects.c.id] - ) - return convert_to_schema_names(project, user_email, tags=tags) - msg = "linter unhappy without this" - raise RuntimeError(msg) - async def get_project_product(self, project_uuid: ProjectID) -> ProductName: async with self.engine.acquire() as conn: result = await conn.execute( @@ -901,40 +819,12 @@ async def get_project_product(self, project_uuid: ProjectID) -> ProductName: raise ProjectNotFoundError(project_uuid=project_uuid) return cast(str, row[0]) - async def update_project_owner_without_checking_permissions( - self, - project_uuid: ProjectIDStr, - *, - new_project_owner: UserID, - new_project_access_rights: dict, - ) -> None: - """The garbage collector needs to alter the row without passing through the - permissions layer (sic).""" - async with self.engine.acquire() as conn: - # now update it - result: ResultProxy = await conn.execute( - projects.update() - .values( - prj_owner=new_project_owner, - access_rights=new_project_access_rights, - last_change_date=now_str(), - ) - .where(projects.c.uuid == project_uuid) - ) - result_row_count: int = result.rowcount - assert result_row_count == 1 # nosec - - async def update_project_last_change_timestamp(self, project_uuid: ProjectIDStr): - async with self.engine.acquire() as conn: - result = await conn.execute( - # pylint: disable=no-value-for-parameter - projects.update() - .values(last_change_date=now_str()) - .where(projects.c.uuid == f"{project_uuid}") - ) - if result.rowcount == 0: - raise ProjectNotFoundError(project_uuid=project_uuid) - + # @exclusive( + # get_redis_lock_manager_client_sdk(app), + # 
lock_key=PROJECT_DB_UPDATE_REDIS_LOCK_KEY.format(project_uuid), + # blocking=True, + # blocking_timeout=datetime.timedelta(seconds=30), + # ) async def delete_project(self, user_id: int, project_uuid: str): _logger.info( "Deleting project with %s for user with %s", @@ -1372,15 +1262,6 @@ async def is_hidden(self, project_uuid: ProjectID) -> bool: ) return bool(result) - async def set_hidden_flag(self, project_uuid: ProjectID, *, hidden: bool): - async with self.engine.acquire() as conn: - stmt = ( - projects.update() - .values(hidden=hidden) - .where(projects.c.uuid == f"{project_uuid}") - ) - await conn.execute(stmt) - # # Project TYPE column # diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 8851a1c9c35..2db61c26a1a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -37,7 +37,6 @@ from models_library.projects import ( Project, ProjectID, - ProjectIDStr, ProjectTemplateType, ) from models_library.projects import ProjectType as ProjectTypeAPI @@ -169,8 +168,8 @@ log = logging.getLogger(__name__) -PROJECT_REDIS_LOCK_KEY: str = "project:{}" PROJECT_DOCUMENT_VERSION_KEY: str = "projects:{}:version" +PROJECT_DB_UPDATE_REDIS_LOCK_KEY: str = "project_db_update:{}" async def _get_and_increment_project_document_version( @@ -197,6 +196,80 @@ async def _get_and_increment_project_document_version( return await redis_client_sdk.redis.incr(version_key) +async def patch_project_and_notify_users( + app: web.Application, + *, + project_uuid: ProjectID, + patch_project_data: dict[str, Any], + user_primary_gid: GroupID, +) -> None: + """ + Patches a project and notifies users involved in the project with version control. + + This function performs the following operations atomically: + 1. Patches the project in the database + 2. Retrieves the updated project with workbench + 3. Creates a project document + 4. Increments the document version + 5. Notifies users about the project update + + Args: + app: The web application instance + project_uuid: The project UUID to patch + patch_project_data: Dictionary containing the project data to patch + user_primary_gid: Primary group ID of the user making the change + + Note: + This function is decorated with Redis exclusive lock to ensure + thread-safe operations on the project document. 
+ """ + + @exclusive( + get_redis_lock_manager_client_sdk(app), + lock_key=PROJECT_DB_UPDATE_REDIS_LOCK_KEY.format(project_uuid), + blocking=True, + blocking_timeout=datetime.timedelta(seconds=30), + ) + async def _patch_and_notify() -> None: + await _projects_repository.patch_project( + app=app, + project_uuid=project_uuid, + new_partial_project_data=patch_project_data, + ) + project_with_workbench = await _projects_repository.get_project_with_workbench( + app=app, project_uuid=project_uuid + ) + project_document = ProjectDocument( + uuid=project_with_workbench.uuid, + workspace_id=project_with_workbench.workspace_id, + name=project_with_workbench.name, + description=project_with_workbench.description, + thumbnail=project_with_workbench.thumbnail, + last_change_date=project_with_workbench.last_change_date, + classifiers=project_with_workbench.classifiers, + dev=project_with_workbench.dev, + quality=project_with_workbench.quality, + workbench=project_with_workbench.workbench, + ui=project_with_workbench.ui, + type=cast(ProjectTypeAPI, project_with_workbench.type), + template_type=cast( + ProjectTemplateType, project_with_workbench.template_type + ), + ) + document_version = await _get_and_increment_project_document_version( + app=app, project_uuid=project_uuid + ) + await notify_project_document_updated( + app=app, + project_id=project_uuid, + user_primary_gid=user_primary_gid, + version=document_version, + document=project_document, + ) + + await _patch_and_notify() + + def _is_node_dynamic(node_key: str) -> bool: return "/dynamic/" in node_key @@ -318,12 +391,14 @@ async def batch_get_project_name( async def update_project_last_change_timestamp( app: web.Application, project_uuid: ProjectID ): - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - assert db # nosec - await db.update_project_last_change_timestamp(ProjectIDStr(f"{project_uuid}")) + await _projects_repository.patch_project( + app=app, + project_uuid=project_uuid, + new_partial_project_data={}, # <-- no changes, just update timestamp + ) -async def patch_project( +async def patch_project_for_user( app: web.Application, *, user_id: UserID, @@ -384,39 +459,15 @@ async def patch_project( project_template=new_template_type, ) - # 5. Patch the project - await _projects_repository.patch_project( - app=app, + # 5. Get user primary group ID (for frontend) + user: dict = await users_service.get_user(app, user_id) + + # 6. Patch the project & Notify users involved in the project + await patch_project_and_notify_users( + app, project_uuid=project_uuid, - new_partial_project_data=patch_project_data, - ) - # 6. 
Notify users involved in the project - project_with_workbench = await _projects_repository.get_project_with_workbench( - app=app, project_uuid=project_uuid - ) - project_document = ProjectDocument( - uuid=project_with_workbench.uuid, - workspace_id=project_with_workbench.workspace_id, - name=project_with_workbench.name, - description=project_with_workbench.description, - thumbnail=project_with_workbench.thumbnail, - last_change_date=project_with_workbench.last_change_date, - classifiers=project_with_workbench.classifiers, - dev=project_with_workbench.dev, - quality=project_with_workbench.quality, - workbench=project_with_workbench.workbench, - ui=project_with_workbench.ui, - type=cast(ProjectTypeAPI, project_with_workbench.type), - template_type=cast(ProjectTemplateType, project_with_workbench.template_type), - ) - document_version = await _get_and_increment_project_document_version( - app=app, project_uuid=project_uuid - ) - await notify_project_document_updated( - app=app, - project_id=project_uuid, - version=document_version, - document=project_document, + patch_project_data=patch_project_data, + user_primary_gid=user["primary_gid"], ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py index bd8d57886e4..f381933f130 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py @@ -67,6 +67,7 @@ async def delete_project_as_admin( try: # 1. hide with _monitor_step(state, name="hide"): + # NOTE: We do not need to use PROJECT_DB_UPDATE_REDIS_LOCK_KEY lock, as hidden field is not passed to frontend project = await _projects_repository.patch_project( app, project_uuid=project_uuid, diff --git a/services/web/server/src/simcore_service_webserver/projects/_socketio.py b/services/web/server/src/simcore_service_webserver/projects/_socketio.py index 6352354237b..eb81f32dae2 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_socketio.py +++ b/services/web/server/src/simcore_service_webserver/projects/_socketio.py @@ -26,6 +26,7 @@ class BaseEvent(BaseModel): class ProjectDocumentEvent(BaseEvent): project_id: ProjectID + user_primary_gid: int version: int document: ProjectDocument @@ -61,6 +62,7 @@ async def notify_project_document_updated( app: web.Application, *, project_id: ProjectID, + user_primary_gid: int, version: int, document: ProjectDocument, ) -> None: @@ -69,6 +71,7 @@ async def notify_project_document_updated( data={ **ProjectDocumentEvent( project_id=project_id, + user_primary_gid=user_primary_gid, version=version, document=document, ).model_dump(mode="json", by_alias=True), diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_service.py b/services/web/server/src/simcore_service_webserver/projects/_trash_service.py index cacc98597ff..45082ecfefe 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_trash_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_trash_service.py @@ -88,7 +88,7 @@ async def trash_project( product_name=product_name, ) - await _projects_service.patch_project( + await _projects_service.patch_project_for_user( app, user_id=user_id, product_name=product_name, @@ -109,7 +109,7 @@ async def untrash_project( project_id: ProjectID, ) -> None: # NOTE: check_user_project_permission is inside projects_api.patch_project 
- await _projects_service.patch_project( + await _projects_service.patch_project_for_user( app, user_id=user_id, product_name=product_name, diff --git a/services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py b/services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py index 33af944fbd7..afeaf66b584 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py @@ -10,7 +10,7 @@ from ..db.plugin import get_asyncpg_engine from ..users import users_service from ..workspaces.api import check_user_workspace_access -from . import _folders_repository, _groups_repository, _projects_repository +from . import _folders_repository, _groups_repository, _projects_service from ._access_rights_service import get_user_project_access_rights from .exceptions import ProjectInvalidRightsError @@ -51,15 +51,15 @@ async def move_project_into_workspace( ) # 4. Update workspace ID on the project resource - await _projects_repository.patch_project( + user = await users_service.get_user(app, user_id=user_id) + await _projects_service.patch_project_and_notify_users( app=app, - connection=conn, project_uuid=project_id, - new_partial_project_data={"workspace_id": workspace_id}, + patch_project_data={"workspace_id": workspace_id}, + user_primary_gid=user["primary_gid"], ) # 5. Remove all project permissions, leave only the user who moved the project - user = await users_service.get_user(app, user_id=user_id) await _groups_repository.delete_all_project_groups( app, connection=conn, project_id=project_id ) diff --git a/services/web/server/src/simcore_service_webserver/projects/api.py b/services/web/server/src/simcore_service_webserver/projects/api.py index 96bb5948527..35abd6b8ef6 100644 --- a/services/web/server/src/simcore_service_webserver/projects/api.py +++ b/services/web/server/src/simcore_service_webserver/projects/api.py @@ -14,6 +14,7 @@ batch_get_project_name, delete_project_by_user, get_project_dict_legacy, + patch_project_and_notify_users, ) __all__: tuple[str, ...] 
= ( @@ -25,6 +26,7 @@ "has_user_project_access_rights", "list_projects", "delete_project_by_user", + "patch_project_and_notify_users", ) diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py index 969fcd27301..e8731a32fb0 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py @@ -17,7 +17,6 @@ import pytest import sqlalchemy as sa from aiohttp.test_utils import TestClient -from common_library.dict_tools import copy_from_dict_ex, remap_keys from faker import Faker from models_library.projects import ProjectID, ProjectTemplateType from models_library.projects_nodes_io import NodeID, NodeIDStr @@ -50,7 +49,6 @@ ProjectNodeRequiredInputsNotSetError, ProjectNotFoundError, ) -from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.users.exceptions import UserNotFoundError from simcore_service_webserver.utils import to_datetime from sqlalchemy.engine.result import Row @@ -712,89 +710,6 @@ async def test_get_node_ids_from_project( assert node_ids_inside_project == set(some_projects_and_nodes[project_id]) -@pytest.mark.parametrize( - "user_role", - [UserRole.USER], -) -async def test_replace_user_project( - db_api: ProjectDBAPI, - user_project: ProjectDict, - logged_user: UserInfoDict, - osparc_product_name: str, - postgres_db: sa.engine.Engine, - aiopg_engine: aiopg.sa.engine.Engine, -): - PROJECT_DICT_IGNORE_FIELDS = {"lastChangeDate"} - original_project = remap_keys( - user_project, - rename={"trashedAt": "trashed"}, - ) - - # replace the project with the same should do nothing - working_project = await db_api.replace_project( - original_project, - user_id=logged_user["id"], - product_name=osparc_product_name, - project_uuid=original_project["uuid"], - ) - assert copy_from_dict_ex( - original_project, PROJECT_DICT_IGNORE_FIELDS - ) == copy_from_dict_ex(working_project, PROJECT_DICT_IGNORE_FIELDS) - _assert_projects_to_product_db_row( - postgres_db, working_project, osparc_product_name - ) - await _assert_projects_nodes_db_rows(aiopg_engine, working_project) - - # now let's create some outputs (similar to what happens when running services) - NODE_INDEX = 1 # this is not the file-picker - node_id = tuple(working_project["workbench"].keys())[NODE_INDEX] - node_data = working_project["workbench"][node_id] - node_data["progress"] = 100 - node_data["outputs"] = { - "output_1": { - "store": 0, - "path": "687b8dc2-fea2-11ec-b7fd-02420a6e3a4d/d61a2ec8-19b4-4375-adcb-fdd22f850333/single_number.txt", - "eTag": "c4ca4238a0b923820dcc509a6f75849b", - }, - "output_2": 5, - } - node_data["runHash"] = ( - "5b0583fa546ac82f0e41cef9705175b7187ce3928ba42892e842add912c16676" - ) - # replacing with the new entries shall return the very same data - replaced_project = await db_api.replace_project( - working_project, - user_id=logged_user["id"], - product_name=osparc_product_name, - project_uuid=working_project["uuid"], - ) - assert copy_from_dict_ex( - working_project, PROJECT_DICT_IGNORE_FIELDS - ) == copy_from_dict_ex(replaced_project, PROJECT_DICT_IGNORE_FIELDS) - _assert_projects_to_product_db_row( - postgres_db, replaced_project, osparc_product_name - ) - await _assert_projects_nodes_db_rows(aiopg_engine, replaced_project) - - # the frontend sends project without some fields, but for FRONTEND type of nodes - # replacing should keep the values - FRONTEND_EXCLUDED_FIELDS = ["outputs", "progress", 
"runHash"] - incoming_frontend_project = deepcopy(original_project) - for node_data in incoming_frontend_project["workbench"].values(): - if "frontend" not in node_data["key"]: - for field in FRONTEND_EXCLUDED_FIELDS: - node_data.pop(field, None) - replaced_project = await db_api.replace_project( - incoming_frontend_project, - user_id=logged_user["id"], - product_name=osparc_product_name, - project_uuid=incoming_frontend_project["uuid"], - ) - assert copy_from_dict_ex( - working_project, PROJECT_DICT_IGNORE_FIELDS - ) == copy_from_dict_ex(replaced_project, PROJECT_DICT_IGNORE_FIELDS) - - @pytest.mark.parametrize("user_role", [UserRole.ANONYMOUS]) # worst case @pytest.mark.parametrize("access_rights", [x.value for x in ProjectAccessRights]) async def test_has_permission( From 25f6e97698080ffda567615c01bde9a7cf3b93df Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 16:08:18 +0200 Subject: [PATCH 04/35] Clarifies progress field deprecation status in comment Updates the comment to note that the progress field, while marked deprecated, is still actively used by the frontend file picker. Helps prevent confusion for future maintainers about its current usage. --- packages/models-library/src/models_library/projects_nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py index 9792ba33ec5..82d9764e1e7 100644 --- a/packages/models-library/src/models_library/projects_nodes.py +++ b/packages/models-library/src/models_library/projects_nodes.py @@ -164,7 +164,7 @@ class Node(BaseModel): ge=0, le=100, description="the node progress value (deprecated in DB, still used for API only)", - deprecated=True, + deprecated=True, # <-- I think this is not true, it is still used by the File Picker (frontend node) ), ] = None From 6befcd94f98ee764c6bc53648595d1e2cc725aca Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 16:27:56 +0200 Subject: [PATCH 05/35] Adds atomic project workbench updates with Redis locking and notifications Introduces a locked and versioned update mechanism for project workbench modifications, ensuring atomic operations using Redis distributed locks and version increments. Notifies users of project document updates after each successful change, reducing race conditions and improving consistency in collaborative environments. 
--- .../projects/_projects_repository_legacy.py | 146 ++++++++++++++++-- 1 file changed, 131 insertions(+), 15 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py index b9d8a774818..02bdb54b71f 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py @@ -5,6 +5,7 @@ """ +import datetime import logging from contextlib import AsyncExitStack from typing import Any, Self, cast @@ -15,6 +16,7 @@ from aiopg.sa import Engine from aiopg.sa.connection import SAConnection from aiopg.sa.result import ResultProxy, RowProxy +from models_library.api_schemas_webserver.projects import ProjectDocument from models_library.basic_types import IDStr from models_library.folders import FolderQuery, FolderScope from models_library.groups import GroupID @@ -22,7 +24,9 @@ from models_library.projects import ( ProjectID, ProjectListAtDB, + ProjectTemplateType, ) +from models_library.projects import ProjectType as ProjectTypeAPI from models_library.projects_comments import CommentID, ProjectsCommentsDB from models_library.projects_nodes import Node from models_library.projects_nodes_io import NodeID, NodeIDStr @@ -40,6 +44,7 @@ from pydantic.types import PositiveInt from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY from servicelib.logging_utils import get_log_record_extra, log_context +from servicelib.redis import exclusive from simcore_postgres_database.aiopg_errors import UniqueViolation from simcore_postgres_database.models.groups import user_to_groups from simcore_postgres_database.models.project_to_groups import project_to_groups @@ -61,7 +66,6 @@ ProjectNodesRepo, ) from simcore_postgres_database.webserver_models import ( - ProjectTemplateType, ProjectType, projects, users, @@ -74,7 +78,9 @@ from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type +from ..redis import get_redis_lock_manager_client_sdk from ..utils import now_str +from . import _projects_repository from ._comments_repository import ( create_project_comment, delete_project_comment, @@ -93,6 +99,7 @@ create_project_access_rights, patch_workbench, ) +from ._socketio import notify_project_document_updated from .exceptions import ( ProjectDeleteError, ProjectInvalidRightsError, @@ -115,6 +122,33 @@ field=IDStr("last_change_date"), direction=OrderDirection.DESC ) +# Project locking and versioning constants +PROJECT_DOCUMENT_VERSION_KEY: str = "projects:{}:version" +PROJECT_DB_UPDATE_REDIS_LOCK_KEY: str = "project_db_update:{}" + + +async def _get_and_increment_project_document_version( + app: web.Application, project_uuid: ProjectID +) -> int: + """ + Atomically gets and increments the project document version using Redis. + Returns the incremented version number. 
+ + Args: + app: The web application instance + project_uuid: The project UUID to get/increment version for + + Returns: + The new (incremented) version number + """ + from ..redis import get_redis_document_manager_client_sdk + + redis_client_sdk = get_redis_document_manager_client_sdk(app) + version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid) + # If key doesn't exist, it's created with value 0 and then incremented to 1 + return await redis_client_sdk.redis.incr(version_key) + + # pylint: disable=too-many-public-methods # NOTE: https://github.com/ITISFoundation/osparc-simcore/issues/3516 @@ -819,12 +853,6 @@ async def get_project_product(self, project_uuid: ProjectID) -> ProductName: raise ProjectNotFoundError(project_uuid=project_uuid) return cast(str, row[0]) - # @exclusive( - # get_redis_lock_manager_client_sdk(app), - # lock_key=PROJECT_DB_UPDATE_REDIS_LOCK_KEY.format(project_uuid), - # blocking=True, - # blocking_timeout=datetime.timedelta(seconds=30), - # ) async def delete_project(self, user_id: int, project_uuid: str): _logger.info( "Deleting project with %s for user with %s", @@ -860,10 +888,10 @@ async def update_project_node_data( partial_workbench_data: dict[NodeIDStr, Any] = { NodeIDStr(f"{node_id}"): new_node_data, } - return await self._update_project_workbench( + return await self._update_project_workbench_with_lock_and_notify( partial_workbench_data, user_id=user_id, - project_uuid=f"{project_uuid}", + project_uuid=project_uuid, product_name=product_name, allow_workbench_changes=False, ) @@ -886,14 +914,102 @@ async def update_project_multiple_node_data( msg=f"update multiple nodes on {project_uuid=} for {user_id=}", extra=get_log_record_extra(user_id=user_id), ): - return await self._update_project_workbench( + return await self._update_project_workbench_with_lock_and_notify( partial_workbench_data, user_id=user_id, - project_uuid=f"{project_uuid}", + project_uuid=project_uuid, product_name=product_name, allow_workbench_changes=False, ) + async def _update_project_workbench_with_lock_and_notify( + self, + partial_workbench_data: dict[NodeIDStr, Any], + *, + user_id: UserID, + project_uuid: ProjectID, + product_name: str | None = None, + allow_workbench_changes: bool, + ) -> tuple[ProjectDict, dict[NodeIDStr, Any]]: + """ + Updates project workbench with Redis lock and user notification. + + This method performs the following operations atomically: + 1. Updates the project workbench in the database + 2. Retrieves the updated project with workbench + 3. Creates a project document + 4. Increments the document version + 5. Notifies users about the project update + + Note: + This function is decorated with Redis exclusive lock to ensure + thread-safe operations on the project document. 
+ """ + + @exclusive( + get_redis_lock_manager_client_sdk(self._app), + lock_key=PROJECT_DB_UPDATE_REDIS_LOCK_KEY.format(project_uuid), + blocking=True, + blocking_timeout=datetime.timedelta(seconds=30), + ) + async def _update_workbench_and_notify() -> ( + tuple[ProjectDict, dict[NodeIDStr, Any]] + ): + # Update the workbench + updated_project, changed_entries = await self._update_project_workbench( + partial_workbench_data, + user_id=user_id, + project_uuid=f"{project_uuid}", + product_name=product_name, + allow_workbench_changes=allow_workbench_changes, + ) + + # Get user's primary group ID for notification + async with self.engine.acquire() as conn: + user_primary_gid = await self._get_user_primary_group_gid(conn, user_id) + + # Get the full project with workbench for document creation + project_with_workbench = ( + await _projects_repository.get_project_with_workbench( + app=self._app, project_uuid=project_uuid + ) + ) + + # Create project document + project_document = ProjectDocument( + uuid=project_with_workbench.uuid, + workspace_id=project_with_workbench.workspace_id, + name=project_with_workbench.name, + description=project_with_workbench.description, + thumbnail=project_with_workbench.thumbnail, + last_change_date=project_with_workbench.last_change_date, + classifiers=project_with_workbench.classifiers, + dev=project_with_workbench.dev, + quality=project_with_workbench.quality, + workbench=project_with_workbench.workbench, + ui=project_with_workbench.ui, + type=cast(ProjectTypeAPI, project_with_workbench.type), + template_type=cast( + ProjectTemplateType, project_with_workbench.template_type + ), + ) + + # Increment document version and notify users + document_version = await _get_and_increment_project_document_version( + app=self._app, project_uuid=project_uuid + ) + await notify_project_document_updated( + app=self._app, + project_id=project_uuid, + user_primary_gid=user_primary_gid, + version=document_version, + document=project_document, + ) + + return updated_project, changed_entries + + return await _update_workbench_and_notify() + async def _update_project_workbench( self, partial_workbench_data: dict[NodeIDStr, Any], @@ -980,10 +1096,10 @@ async def add_project_node( exclude_unset=True, ), } - await self._update_project_workbench( + await self._update_project_workbench_with_lock_and_notify( partial_workbench_data, user_id=user_id, - project_uuid=f"{project_id}", + project_uuid=project_id, product_name=product_name, allow_workbench_changes=True, ) @@ -998,10 +1114,10 @@ async def remove_project_node( partial_workbench_data: dict[NodeIDStr, Any] = { NodeIDStr(f"{node_id}"): None, } - await self._update_project_workbench( + await self._update_project_workbench_with_lock_and_notify( partial_workbench_data, user_id=user_id, - project_uuid=f"{project_id}", + project_uuid=project_id, allow_workbench_changes=True, ) project_nodes_repo = ProjectNodesRepo(project_uuid=project_id) From 3d7c505bef6fa5b6cf01b7b408b97f431f96ccac Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 19:08:10 +0200 Subject: [PATCH 06/35] refactor duplication --- .../src/servicelib/redis/__init__.py | 12 +++- .../redis/_project_document_version.py | 56 +++++++++++++++++++ .../projects/_projects_repository_legacy.py | 42 ++++---------- .../projects/_projects_service.py | 34 ++--------- .../projects/_socketio.py | 50 ----------------- 5 files changed, 83 insertions(+), 111 deletions(-) create mode 100644 packages/service-library/src/servicelib/redis/_project_document_version.py diff --git 
a/packages/service-library/src/servicelib/redis/__init__.py b/packages/service-library/src/servicelib/redis/__init__.py index 9e63a9f6525..0374bd986f2 100644 --- a/packages/service-library/src/servicelib/redis/__init__.py +++ b/packages/service-library/src/servicelib/redis/__init__.py @@ -8,6 +8,12 @@ ProjectLockError, ) from ._models import RedisManagerDBConfig +from ._project_document_version import ( + PROJECT_DB_UPDATE_REDIS_LOCK_KEY, + PROJECT_DOCUMENT_VERSION_KEY, + get_and_increment_project_document_version, + get_project_document_version, +) from ._project_lock import ( get_project_locked_state, is_project_locked, @@ -19,15 +25,17 @@ "CouldNotAcquireLockError", "CouldNotConnectToRedisError", "exclusive", + "get_and_increment_project_document_version", + "get_project_document_version", "get_project_locked_state", "handle_redis_returns_union_types", "is_project_locked", "LockLostError", + "PROJECT_DB_UPDATE_REDIS_LOCK_KEY", + "PROJECT_DOCUMENT_VERSION_KEY", "ProjectLockError", "RedisClientSDK", "RedisClientsManager", "RedisManagerDBConfig", "with_project_locked", ) - -# nopycln: file diff --git a/packages/service-library/src/servicelib/redis/_project_document_version.py b/packages/service-library/src/servicelib/redis/_project_document_version.py new file mode 100644 index 00000000000..c55f303ffe1 --- /dev/null +++ b/packages/service-library/src/servicelib/redis/_project_document_version.py @@ -0,0 +1,56 @@ +"""Project document versioning utilities. + +This module provides utilities for managing project document versions using Redis. +The versioning system ensures that all users working on a project are synchronized +with the latest changes through atomic version incrementing. +""" + +from typing import Final + +from models_library.projects import ProjectID + +from ._client import RedisClientSDK + +# Redis key patterns +PROJECT_DOCUMENT_VERSION_KEY: Final[str] = "projects:{}:version" +PROJECT_DB_UPDATE_REDIS_LOCK_KEY: Final[str] = "project_db_update:{}" + + +async def get_and_increment_project_document_version( + redis_client: RedisClientSDK, project_uuid: ProjectID +) -> int: + """ + Atomically gets and increments the project document version using Redis. + Returns the incremented version number. + + This function ensures thread-safe version incrementing by using Redis INCR command + which is atomic. The version starts at 1 for the first call. + + Args: + redis_client: The Redis client SDK instance + project_uuid: The project UUID to get/increment version for + + Returns: + The new (incremented) version number + """ + version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid) + # If key doesn't exist, it's created with value 0 and then incremented to 1 + return await redis_client.redis.incr(version_key) + + +async def get_project_document_version( + redis_client: RedisClientSDK, project_uuid: ProjectID +) -> int: + """ + Gets the current project document version without incrementing it. 
+ + Args: + redis_client: The Redis client SDK instance + project_uuid: The project UUID to get version for + + Returns: + The current version number (0 if no version exists yet) + """ + version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid) + version = await redis_client.redis.get(version_key) + return int(version) if version is not None else 0 diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py index 02bdb54b71f..7c817946119 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py @@ -44,7 +44,11 @@ from pydantic.types import PositiveInt from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY from servicelib.logging_utils import get_log_record_extra, log_context -from servicelib.redis import exclusive +from servicelib.redis import ( + PROJECT_DB_UPDATE_REDIS_LOCK_KEY, + exclusive, + get_and_increment_project_document_version, +) from simcore_postgres_database.aiopg_errors import UniqueViolation from simcore_postgres_database.models.groups import user_to_groups from simcore_postgres_database.models.project_to_groups import project_to_groups @@ -78,7 +82,10 @@ from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type -from ..redis import get_redis_lock_manager_client_sdk +from ..redis import ( + get_redis_document_manager_client_sdk, + get_redis_lock_manager_client_sdk, +) from ..utils import now_str from . import _projects_repository from ._comments_repository import ( @@ -122,32 +129,6 @@ field=IDStr("last_change_date"), direction=OrderDirection.DESC ) -# Project locking and versioning constants -PROJECT_DOCUMENT_VERSION_KEY: str = "projects:{}:version" -PROJECT_DB_UPDATE_REDIS_LOCK_KEY: str = "project_db_update:{}" - - -async def _get_and_increment_project_document_version( - app: web.Application, project_uuid: ProjectID -) -> int: - """ - Atomically gets and increments the project document version using Redis. - Returns the incremented version number. 
- - Args: - app: The web application instance - project_uuid: The project UUID to get/increment version for - - Returns: - The new (incremented) version number - """ - from ..redis import get_redis_document_manager_client_sdk - - redis_client_sdk = get_redis_document_manager_client_sdk(app) - version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid) - # If key doesn't exist, it's created with value 0 and then incremented to 1 - return await redis_client_sdk.redis.incr(version_key) - # pylint: disable=too-many-public-methods # NOTE: https://github.com/ITISFoundation/osparc-simcore/issues/3516 @@ -995,8 +976,9 @@ async def _update_workbench_and_notify() -> ( ) # Increment document version and notify users - document_version = await _get_and_increment_project_document_version( - app=self._app, project_uuid=project_uuid + redis_client_sdk = get_redis_document_manager_client_sdk(self._app) + document_version = await get_and_increment_project_document_version( + redis_client=redis_client_sdk, project_uuid=project_uuid ) await notify_project_document_updated( app=self._app, diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 2db61c26a1a..64899c09083 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -84,7 +84,9 @@ ServiceWasNotFoundError, ) from servicelib.redis import ( + PROJECT_DB_UPDATE_REDIS_LOCK_KEY, exclusive, + get_and_increment_project_document_version, get_project_locked_state, is_project_locked, with_project_locked, @@ -168,33 +170,6 @@ log = logging.getLogger(__name__) -PROJECT_DOCUMENT_VERSION_KEY: str = "projects:{}:version" -PROJECT_DB_UPDATE_REDIS_LOCK_KEY: str = "project_db_update:{}" - - -async def _get_and_increment_project_document_version( - app: web.Application, project_uuid: ProjectID -) -> int: - """ - Atomically gets and increments the project document version using Redis. - - This function ensures thread-safe version incrementing by using Redis INCR command - which is atomic. The version starts at 1 for the first call. 
- - Args: - app: The web application instance - project_uuid: The project UUID - - Returns: - The new incremented version number - """ - redis_client_sdk = get_redis_document_manager_client_sdk(app) - version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid) - - # Redis INCR is atomic and returns the new value - # If key doesn't exist, it's created with value 0 and then incremented to 1 - return await redis_client_sdk.redis.incr(version_key) - async def patch_project_and_notify_users( app: web.Application, @@ -256,8 +231,9 @@ async def _patch_and_notify() -> None: ProjectTemplateType, project_with_workbench.template_type ), ) - document_version = await _get_and_increment_project_document_version( - app=app, project_uuid=project_uuid + redis_client_sdk = get_redis_document_manager_client_sdk(app) + document_version = await get_and_increment_project_document_version( + redis_client=redis_client_sdk, project_uuid=project_uuid ) await notify_project_document_updated( app=app, diff --git a/services/web/server/src/simcore_service_webserver/projects/_socketio.py b/services/web/server/src/simcore_service_webserver/projects/_socketio.py index eb81f32dae2..deaea1975fb 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_socketio.py +++ b/services/web/server/src/simcore_service_webserver/projects/_socketio.py @@ -9,8 +9,6 @@ from ..socketio.messages import send_message_to_project_room -# SOCKET_IO_PROJECT_CREATED_EVENT: Final[str] = "projectDocument:created" -# SOCKET_IO_PROJECT_DELETED_EVENT: Final[str] = "projectDocument:deleted" SOCKET_IO_PROJECT_DOCUMENT_UPDATED_EVENT: Final[str] = "projectDocument:updated" @@ -31,33 +29,6 @@ class ProjectDocumentEvent(BaseEvent): document: ProjectDocument -# async def notify_project_created( -# app: web.Application, -# *, -# project_id: ProjectID, -# product_name: ProductName, -# user_group_id: GroupID, -# project_name: str, -# created: datetime.datetime, -# modified: datetime.datetime, -# ) -> None: -# notification_message = SocketMessageDict( -# event_type=SOCKET_IO_PROJECT_CREATED_EVENT, -# data={ -# **ProjectCreatedOrUpdatedEvent( -# product_name=product_name, -# project_id=project_id, -# user_group_id=user_group_id, -# name=project_name, -# created=created, -# modified=modified, -# ).model_dump(mode="json", by_alias=True), -# }, -# ) - -# await send_message_to_project_room(app, project_id, notification_message) - - async def notify_project_document_updated( app: web.Application, *, @@ -78,24 +49,3 @@ async def notify_project_document_updated( }, ) await send_message_to_project_room(app, project_id, notification_message) - - -# async def notify_project_deleted( -# app: web.Application, -# *, -# project_id: ProjectID, -# product_name: ProductName, -# user_group_id: GroupID, -# ) -> None: -# notification_message = SocketMessageDict( -# event_type=SOCKET_IO_PROJECT_DELETED_EVENT, -# data={ -# **ProjectDeletedEvent( -# product_name=product_name, -# project_id=project_id, -# user_group_id=user_group_id, -# ).model_dump(mode="json", by_alias=True), -# }, -# ) - -# await send_message_to_project_room(app, project_id, notification_message) From 7956f60928c4eadc11ce1012d3cca50927eba825 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 19:30:42 +0200 Subject: [PATCH 07/35] fix relative import --- .../src/models_library/api_schemas_webserver/socketio.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/socketio.py 
b/packages/models-library/src/models_library/api_schemas_webserver/socketio.py index d5eb5456bd7..32753cdb829 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/socketio.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/socketio.py @@ -1,7 +1,6 @@ -from models_library.projects import ProjectID - from ..basic_types import IDStr from ..groups import GroupID +from ..projects import ProjectID from ..users import UserID From 72bdab89c958300339dd77b6020d2f3b07a03312 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 19:43:26 +0200 Subject: [PATCH 08/35] fix failing tests --- .../servicelib/redis/_project_document_version.py | 2 +- .../projects/_crud_api_read.py | 13 ++----------- .../tests/unit/isolated/test_projects__db_utils.py | 2 +- 3 files changed, 4 insertions(+), 13 deletions(-) diff --git a/packages/service-library/src/servicelib/redis/_project_document_version.py b/packages/service-library/src/servicelib/redis/_project_document_version.py index c55f303ffe1..ab8d24cc56c 100644 --- a/packages/service-library/src/servicelib/redis/_project_document_version.py +++ b/packages/service-library/src/servicelib/redis/_project_document_version.py @@ -35,7 +35,7 @@ async def get_and_increment_project_document_version( """ version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid) # If key doesn't exist, it's created with value 0 and then incremented to 1 - return await redis_client.redis.incr(version_key) + return await int(redis_client.redis.incr(version_key)) async def get_project_document_version( diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py index 8e9bdd70584..8fcf5de57fd 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py @@ -16,9 +16,6 @@ from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope from pydantic import NonNegativeInt from servicelib.utils import logged_gather -from simcore_postgres_database.webserver_models import ( - ProjectTemplateType as ProjectTemplateTypeDB, -) from ..folders import _folders_repository from ..users import users_service @@ -174,9 +171,7 @@ async def list_projects( # pylint: disable=too-many-arguments ), # attrs filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type), - filter_by_template_type=( - ProjectTemplateTypeDB(template_type) if template_type else None - ), + filter_by_template_type=template_type, filter_trashed=trashed, filter_hidden=show_hidden, # composed attrs @@ -228,11 +223,7 @@ async def list_projects_full_depth( # pylint: disable=too-many-arguments filter_by_project_type=ProjectTypeAPI.to_project_type_db( filter_by_project_type ), - filter_by_template_type=( - ProjectTemplateTypeDB(filter_by_template_type) - if filter_by_template_type - else None - ), + filter_by_template_type=filter_by_template_type, search_by_multi_columns=search_by_multi_columns, search_by_project_name=search_by_project_name, offset=offset, diff --git a/services/web/server/tests/unit/isolated/test_projects__db_utils.py b/services/web/server/tests/unit/isolated/test_projects__db_utils.py index 7f42f14f23b..1d8349fb2c7 100644 --- a/services/web/server/tests/unit/isolated/test_projects__db_utils.py +++ b/services/web/server/tests/unit/isolated/test_projects__db_utils.py @@ -22,12 +22,12 @@ convert_to_schema_names, 
create_project_access_rights, patch_workbench, - update_workbench, ) from simcore_service_webserver.projects._projects_repository_legacy_utils import ( DB_EXCLUSIVE_COLUMNS, SCHEMA_NON_NULL_KEYS, assemble_array_groups, + update_workbench, ) from simcore_service_webserver.projects.exceptions import ( NodeNotFoundError, From d4beeedf314ad815a8d99c806549255980991cc0 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 19:48:55 +0200 Subject: [PATCH 09/35] minor --- README_pycrdt_test.md | 0 packages/service-library/src/servicelib/redis/__init__.py | 2 ++ .../server/src/simcore_service_webserver/socketio/_handlers.py | 2 +- .../server/src/simcore_service_webserver/socketio/messages.py | 1 - 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 README_pycrdt_test.md diff --git a/README_pycrdt_test.md b/README_pycrdt_test.md deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/packages/service-library/src/servicelib/redis/__init__.py b/packages/service-library/src/servicelib/redis/__init__.py index 0374bd986f2..6b6f4607ac1 100644 --- a/packages/service-library/src/servicelib/redis/__init__.py +++ b/packages/service-library/src/servicelib/redis/__init__.py @@ -39,3 +39,5 @@ "RedisManagerDBConfig", "with_project_locked", ) + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py index c1312228420..ecf576dfc48 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py @@ -104,7 +104,7 @@ async def _set_user_in_group_rooms( @register_socketio_handler -async def connect( # <- MD: here the frontend connects to the socket.io server +async def connect( socket_id: SocketID, environ: EnvironDict, app: web.Application ) -> bool: """socketio reserved handler for when the fontend connects through socket.io diff --git a/services/web/server/src/simcore_service_webserver/socketio/messages.py b/services/web/server/src/simcore_service_webserver/socketio/messages.py index c4680da0599..5d6a973ea30 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/messages.py +++ b/services/web/server/src/simcore_service_webserver/socketio/messages.py @@ -30,7 +30,6 @@ SOCKET_IO_NODE_UPDATED_EVENT: Final[str] = "nodeUpdated" SOCKET_IO_PROJECT_UPDATED_EVENT: Final[str] = "projectStateUpdated" -# SOCKET_IO_PROJECT_STORE_UPDATED_EVENT: Final[str] = "projectStoreUpdated" SOCKET_IO_WALLET_OSPARC_CREDITS_UPDATED_EVENT: Final[str] = "walletOsparcCreditsUpdated" From dbf088f604abc03f836fff97863b8adf606828b6 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 19:50:07 +0200 Subject: [PATCH 10/35] minor --- .../src/servicelib/redis/_project_document_version.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/service-library/src/servicelib/redis/_project_document_version.py b/packages/service-library/src/servicelib/redis/_project_document_version.py index ab8d24cc56c..c0a7ec63086 100644 --- a/packages/service-library/src/servicelib/redis/_project_document_version.py +++ b/packages/service-library/src/servicelib/redis/_project_document_version.py @@ -35,7 +35,8 @@ async def get_and_increment_project_document_version( """ version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid) # If key doesn't exist, it's created with value 0 and then incremented to 1 - return await 
int(redis_client.redis.incr(version_key)) + output = await redis_client.redis.incr(version_key) + return int(output) async def get_project_document_version( From dddb7ce60a7165445fb9b679b2cccd7cb03e530a Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 19:52:28 +0200 Subject: [PATCH 11/35] minor --- .../projects/_projects_service.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 6e024406849..80bacf30b1a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -419,17 +419,19 @@ async def patch_project_for_user( "write": True, "delete": True, } - user: dict = await users_service.get_user(app, project_db.prj_owner) - _prj_owner_primary_group = f"{user['primary_gid']}" + prj_owner_user: dict = await users_service.get_user(app, project_db.prj_owner) + _prj_owner_primary_group = f"{prj_owner_user['primary_gid']}" if _prj_owner_primary_group not in new_prj_access_rights: raise ProjectOwnerNotFoundInTheProjectAccessRightsError if new_prj_access_rights[_prj_owner_primary_group] != _prj_required_permissions: raise ProjectOwnerNotFoundInTheProjectAccessRightsError - # 4. If patching template type + # 4. Get user primary group ID + current_user: dict = await users_service.get_user(app, user_id) + + # 5. If patching template type if new_template_type := patch_project_data.get("template_type"): # 4.1 Check if user is a tester - current_user: dict = await users_service.get_user(app, user_id) if UserRole(current_user["role"]) < UserRole.TESTER: raise InsufficientRoleForProjectTemplateTypeUpdateError # 4.2 Check the compatibility of the template type with the project @@ -446,15 +448,12 @@ async def patch_project_for_user( project_template=new_template_type, ) - # 5. Get user primary group ID (for frontend) - user: dict = await users_service.get_user(app, user_id) - # 6. Patch the project & Notify users involved in the project await patch_project_and_notify_users( app, project_uuid=project_uuid, patch_project_data=patch_project_data, - user_primary_gid=user["primary_gid"], + user_primary_gid=current_user["primary_gid"], ) From bfbc2852581e66a4694a293d2e254ceb3ae08cdf Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 20:14:33 +0200 Subject: [PATCH 12/35] minor --- services/web/server/src/simcore_service_webserver/rabbitmq.py | 4 +--- .../web/server/tests/unit/with_dbs/docker-compose-devel.yml | 2 +- services/web/server/tests/unit/with_dbs/docker-compose.yml | 2 +- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/rabbitmq.py b/services/web/server/src/simcore_service_webserver/rabbitmq.py index 95852012541..c415c53057f 100644 --- a/services/web/server/src/simcore_service_webserver/rabbitmq.py +++ b/services/web/server/src/simcore_service_webserver/rabbitmq.py @@ -47,9 +47,7 @@ async def _rabbitmq_client_cleanup_ctx(app: web.Application) -> AsyncIterator[No # injects healthcheck healthcheck: HealthCheck = app[HealthCheck.__name__] - healthcheck.on_healthcheck.append( - _on_healthcheck_async_adapter - ) # <-- MD: check here! 
+ healthcheck.on_healthcheck.append(_on_healthcheck_async_adapter) yield diff --git a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml index faae7d7c6cd..dd89755d90d 100644 --- a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml +++ b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml @@ -63,7 +63,7 @@ services: "--loglevel", "verbose", "--databases", - "8", + "11", "--appendonly", "yes", "--requirepass", diff --git a/services/web/server/tests/unit/with_dbs/docker-compose.yml b/services/web/server/tests/unit/with_dbs/docker-compose.yml index 65a9eabb85a..6482ab57d4e 100644 --- a/services/web/server/tests/unit/with_dbs/docker-compose.yml +++ b/services/web/server/tests/unit/with_dbs/docker-compose.yml @@ -46,7 +46,7 @@ services: "--loglevel", "verbose", "--databases", - "8", + "11", "--appendonly", "yes", "--requirepass", From 500485891c7f79463be25c2c46f3e7c37c59442a Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 20:23:42 +0200 Subject: [PATCH 13/35] add tests --- .../redis/test_project_document_version.py | 68 +++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 packages/service-library/tests/redis/test_project_document_version.py diff --git a/packages/service-library/tests/redis/test_project_document_version.py b/packages/service-library/tests/redis/test_project_document_version.py new file mode 100644 index 00000000000..8d86c04e27b --- /dev/null +++ b/packages/service-library/tests/redis/test_project_document_version.py @@ -0,0 +1,68 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument + +from typing import cast +from uuid import UUID + +import pytest +from faker import Faker +from models_library.projects import ProjectID +from servicelib.redis import RedisClientSDK +from servicelib.redis._project_document_version import ( + get_and_increment_project_document_version, + get_project_document_version, +) + +pytest_simcore_core_services_selection = [ + "redis", +] +pytest_simcore_ops_services_selection = [ + "redis-commander", +] + + +@pytest.fixture() +def project_uuid(faker: Faker) -> ProjectID: + return cast(UUID, faker.uuid4(cast_to=None)) + + +async def test_project_document_version_workflow( + redis_client_sdk: RedisClientSDK, project_uuid: ProjectID +): + """Test the complete workflow of getting and incrementing project document versions.""" + + # Initially, version should be 0 (no version exists yet) + current_version = await get_project_document_version(redis_client_sdk, project_uuid) + assert current_version == 0 + + # First increment should return 1 + new_version = await get_and_increment_project_document_version( + redis_client_sdk, project_uuid + ) + assert new_version == 1 + + # Getting current version should now return 1 + current_version = await get_project_document_version(redis_client_sdk, project_uuid) + assert current_version == 1 + + # Second increment should return 2 + new_version = await get_and_increment_project_document_version( + redis_client_sdk, project_uuid + ) + assert new_version == 2 + + # Getting current version should now return 2 + current_version = await get_project_document_version(redis_client_sdk, project_uuid) + assert current_version == 2 + + # Multiple increments should work correctly + for expected_version in range(3, 6): + new_version = await get_and_increment_project_document_version( + redis_client_sdk, project_uuid + ) + assert new_version == expected_version + + 
current_version = await get_project_document_version( + redis_client_sdk, project_uuid + ) + assert current_version == expected_version From 714404c94e1fadee224381fce2055ffb6a3a711e Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 21:01:32 +0200 Subject: [PATCH 14/35] add tests --- ..._patch_project_and_notify_users_locking.py | 216 ++++++++++++++++++ 1 file changed, 216 insertions(+) create mode 100644 services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py b/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py new file mode 100644 index 00000000000..63eb0cebe0a --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py @@ -0,0 +1,216 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +""" +Tests for patch_project_and_notify_users function focusing on the Redis locking mechanism +and concurrent access patterns. + +These tests verify that: +1. Sequential operations work correctly +2. Concurrent operations are properly serialized by Redis locks +3. Version increments are consistent and atomic +4. Different projects don't interfere with each other +5. Mixed concurrent operations (patches + version checks) maintain consistency +6. Error handling during concurrent access is robust +""" + +import asyncio +from http import HTTPStatus +from typing import Any +from uuid import uuid4 + +import pytest +from aiohttp.test_utils import TestClient +from faker import Faker +from models_library.projects import ProjectID +from pytest_simcore.helpers.webserver_users import UserInfoDict +from servicelib.aiohttp import status +from servicelib.redis import get_and_increment_project_document_version +from simcore_service_webserver.db.models import UserRole +from simcore_service_webserver.projects._projects_service import ( + patch_project_and_notify_users, +) +from simcore_service_webserver.projects.models import ProjectDict +from simcore_service_webserver.redis import get_redis_document_manager_client_sdk + + +@pytest.fixture +def concurrent_patch_data_list(faker: Faker) -> list[dict[str, Any]]: + """Generate multiple different patch data for concurrent testing""" + return [{"name": f"concurrent-test-{faker.word()}-{i}"} for i in range(10)] + + +@pytest.fixture +def user_primary_gid(logged_user: UserInfoDict) -> int: + """Extract user primary group ID from logged user""" + return int(logged_user["primary_gid"]) + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.USER, status.HTTP_200_OK), + ], +) +async def test_patch_project_and_notify_users_sequential( + user_role: UserRole, + expected: HTTPStatus, + client: TestClient, + user_project: ProjectDict, + user_primary_gid: int, + faker: Faker, +): + """Test that patch_project_and_notify_users works correctly in sequential mode""" + assert client.app + project_uuid = ProjectID(user_project["uuid"]) + + # Perform sequential patches + patch_data_1 = {"name": f"sequential-test-{faker.word()}-1"} + patch_data_2 = {"name": f"sequential-test-{faker.word()}-2"} + + # First patch + await patch_project_and_notify_users( + app=client.app, + project_uuid=project_uuid, + patch_project_data=patch_data_1, + user_primary_gid=user_primary_gid, + ) 
+ + # Get version after first patch + redis_client = get_redis_document_manager_client_sdk(client.app) + version_1 = await get_and_increment_project_document_version( + redis_client=redis_client, project_uuid=project_uuid + ) + + # Second patch + await patch_project_and_notify_users( + app=client.app, + project_uuid=project_uuid, + patch_project_data=patch_data_2, + user_primary_gid=user_primary_gid, + ) + + # Get version after second patch + version_2 = await get_and_increment_project_document_version( + redis_client=redis_client, project_uuid=project_uuid + ) + + # Verify versions are incrementing correctly + assert version_2 > version_1 + assert version_2 - version_1 == 2 # Two operations should increment by 2 + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.USER, status.HTTP_200_OK), + ], +) +async def test_patch_project_and_notify_users_concurrent_locking( + user_role: UserRole, + expected: HTTPStatus, + client: TestClient, + user_project: ProjectDict, + user_primary_gid: int, + concurrent_patch_data_list: list[dict[str, Any]], +): + """Test that patch_project_and_notify_users handles concurrent access correctly with locking""" + assert client.app + project_uuid = ProjectID(user_project["uuid"]) + + # Get initial version + redis_client = get_redis_document_manager_client_sdk(client.app) + initial_version = await get_and_increment_project_document_version( + redis_client=redis_client, project_uuid=project_uuid + ) + + # Create concurrent patch tasks + tasks = [ + patch_project_and_notify_users( + app=client.app, + project_uuid=project_uuid, + patch_project_data=patch_data, + user_primary_gid=user_primary_gid, + ) + for patch_data in concurrent_patch_data_list + ] + + # Execute all tasks concurrently + await asyncio.gather(*tasks) + + # Get final version + final_version = await get_and_increment_project_document_version( + redis_client=redis_client, project_uuid=project_uuid + ) + + # Verify that all concurrent operations were processed and version incremented correctly + # Each patch_project_and_notify_users call should increment version by 1 + expected_final_version = initial_version + len(concurrent_patch_data_list) + 1 + assert final_version == expected_final_version + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.USER, status.HTTP_200_OK), + ], +) +async def test_patch_project_and_notify_users_concurrent_different_projects( + user_role: UserRole, + expected: HTTPStatus, + client: TestClient, + user_project: ProjectDict, + user_primary_gid: int, + faker: Faker, +): + """Test that concurrent patches to different projects don't interfere with each other""" + assert client.app + + # Use different project UUIDs to simulate different projects + project_uuid_1 = ProjectID(user_project["uuid"]) + project_uuid_2 = ProjectID(str(uuid4())) # Simulate second project + project_uuid_3 = ProjectID(str(uuid4())) # Simulate third project + + redis_client = get_redis_document_manager_client_sdk(client.app) + + # Get initial versions + initial_version_1 = await get_and_increment_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_1 + ) + initial_version_2 = await get_and_increment_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_2 + ) + initial_version_3 = await get_and_increment_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_3 + ) + + # Note: For this test, we only test the locking mechanism for project_1 + # as we would need to create actual projects for the 
others + patch_data = {"name": f"concurrent-different-projects-{faker.word()}"} + + # Only test project_1 (real project) but verify version isolation + await patch_project_and_notify_users( + app=client.app, + project_uuid=project_uuid_1, + patch_project_data=patch_data, + user_primary_gid=user_primary_gid, + ) + + # Get final versions + final_version_1 = await get_and_increment_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_1 + ) + final_version_2 = await get_and_increment_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_2 + ) + final_version_3 = await get_and_increment_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_3 + ) + + # Verify that only project_1 version changed + assert final_version_1 == initial_version_1 + 2 # One patch + one version check + assert final_version_2 == initial_version_2 + 1 # Only version check + assert final_version_3 == initial_version_3 + 1 # Only version check From 47fa937fe4165eb09aa03b1a9311cfa2737d75f8 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Tue, 22 Jul 2025 21:14:25 +0200 Subject: [PATCH 15/35] add tests --- ...est_projects_patch_project_and_notify_users_locking.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py b/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py index 63eb0cebe0a..82359b09fe3 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py @@ -7,14 +7,6 @@ """ Tests for patch_project_and_notify_users function focusing on the Redis locking mechanism and concurrent access patterns. - -These tests verify that: -1. Sequential operations work correctly -2. Concurrent operations are properly serialized by Redis locks -3. Version increments are consistent and atomic -4. Different projects don't interfere with each other -5. Mixed concurrent operations (patches + version checks) maintain consistency -6. 
Error handling during concurrent access is robust """ import asyncio From 30b8d197d1622b465f4eede6188af0120399e90a Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 09:16:26 +0200 Subject: [PATCH 16/35] add back function that is being used --- .../projects/_projects_repository_legacy.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py index 7c817946119..b707adc01e0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py @@ -23,6 +23,7 @@ from models_library.products import ProductName from models_library.projects import ( ProjectID, + ProjectIDStr, ProjectListAtDB, ProjectTemplateType, ) @@ -834,6 +835,29 @@ async def get_project_product(self, project_uuid: ProjectID) -> ProductName: raise ProjectNotFoundError(project_uuid=project_uuid) return cast(str, row[0]) + async def update_project_owner_without_checking_permissions( # <-- Used by Garbage Collector + self, + project_uuid: ProjectIDStr, + *, + new_project_owner: UserID, + new_project_access_rights: dict, + ) -> None: + """The garbage collector needs to alter the row without passing through the + permissions layer (sic).""" + async with self.engine.acquire() as conn: + # now update it + result: ResultProxy = await conn.execute( + projects.update() + .values( + prj_owner=new_project_owner, + access_rights=new_project_access_rights, + last_change_date=now_str(), + ) + .where(projects.c.uuid == project_uuid) + ) + result_row_count: int = result.rowcount + assert result_row_count == 1 # nosec + async def delete_project(self, user_id: int, project_uuid: str): _logger.info( "Deleting project with %s for user with %s", From 82d6d17debebb719e755be2d6aafa66f5fd91d73 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 11:01:08 +0200 Subject: [PATCH 17/35] review @GitHK @pcrespov --- .../src/servicelib/redis/__init__.py | 6 ++--- .../redis/_project_document_version.py | 22 ++-------------- .../redis/test_project_document_version.py | 26 +++---------------- .../projects/_projects_repository_legacy.py | 4 +-- .../projects/_projects_service.py | 4 +-- ..._patch_project_and_notify_users_locking.py | 22 ++++++++-------- 6 files changed, 23 insertions(+), 61 deletions(-) diff --git a/packages/service-library/src/servicelib/redis/__init__.py b/packages/service-library/src/servicelib/redis/__init__.py index 6b6f4607ac1..152b3053893 100644 --- a/packages/service-library/src/servicelib/redis/__init__.py +++ b/packages/service-library/src/servicelib/redis/__init__.py @@ -11,8 +11,7 @@ from ._project_document_version import ( PROJECT_DB_UPDATE_REDIS_LOCK_KEY, PROJECT_DOCUMENT_VERSION_KEY, - get_and_increment_project_document_version, - get_project_document_version, + increment_and_return_project_document_version, ) from ._project_lock import ( get_project_locked_state, @@ -25,8 +24,7 @@ "CouldNotAcquireLockError", "CouldNotConnectToRedisError", "exclusive", - "get_and_increment_project_document_version", - "get_project_document_version", + "increment_and_return_project_document_version", "get_project_locked_state", "handle_redis_returns_union_types", "is_project_locked", diff --git a/packages/service-library/src/servicelib/redis/_project_document_version.py 
b/packages/service-library/src/servicelib/redis/_project_document_version.py index c0a7ec63086..7193adb8ca7 100644 --- a/packages/service-library/src/servicelib/redis/_project_document_version.py +++ b/packages/service-library/src/servicelib/redis/_project_document_version.py @@ -16,11 +16,11 @@ PROJECT_DB_UPDATE_REDIS_LOCK_KEY: Final[str] = "project_db_update:{}" -async def get_and_increment_project_document_version( +async def increment_and_return_project_document_version( redis_client: RedisClientSDK, project_uuid: ProjectID ) -> int: """ - Atomically gets and increments the project document version using Redis. + Atomically increments and returns the project document version using Redis. Returns the incremented version number. This function ensures thread-safe version incrementing by using Redis INCR command @@ -37,21 +37,3 @@ async def get_and_increment_project_document_version( # If key doesn't exist, it's created with value 0 and then incremented to 1 output = await redis_client.redis.incr(version_key) return int(output) - - -async def get_project_document_version( - redis_client: RedisClientSDK, project_uuid: ProjectID -) -> int: - """ - Gets the current project document version without incrementing it. - - Args: - redis_client: The Redis client SDK instance - project_uuid: The project UUID to get version for - - Returns: - The current version number (0 if no version exists yet) - """ - version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid) - version = await redis_client.redis.get(version_key) - return int(version) if version is not None else 0 diff --git a/packages/service-library/tests/redis/test_project_document_version.py b/packages/service-library/tests/redis/test_project_document_version.py index 8d86c04e27b..47b9ca4da30 100644 --- a/packages/service-library/tests/redis/test_project_document_version.py +++ b/packages/service-library/tests/redis/test_project_document_version.py @@ -9,8 +9,7 @@ from models_library.projects import ProjectID from servicelib.redis import RedisClientSDK from servicelib.redis._project_document_version import ( - get_and_increment_project_document_version, - get_project_document_version, + increment_and_return_project_document_version, ) pytest_simcore_core_services_selection = [ @@ -31,38 +30,21 @@ async def test_project_document_version_workflow( ): """Test the complete workflow of getting and incrementing project document versions.""" - # Initially, version should be 0 (no version exists yet) - current_version = await get_project_document_version(redis_client_sdk, project_uuid) - assert current_version == 0 - # First increment should return 1 - new_version = await get_and_increment_project_document_version( + new_version = await increment_and_return_project_document_version( redis_client_sdk, project_uuid ) assert new_version == 1 - # Getting current version should now return 1 - current_version = await get_project_document_version(redis_client_sdk, project_uuid) - assert current_version == 1 - # Second increment should return 2 - new_version = await get_and_increment_project_document_version( + new_version = await increment_and_return_project_document_version( redis_client_sdk, project_uuid ) assert new_version == 2 - # Getting current version should now return 2 - current_version = await get_project_document_version(redis_client_sdk, project_uuid) - assert current_version == 2 - # Multiple increments should work correctly for expected_version in range(3, 6): - new_version = await get_and_increment_project_document_version( + new_version 
= await increment_and_return_project_document_version( redis_client_sdk, project_uuid ) assert new_version == expected_version - - current_version = await get_project_document_version( - redis_client_sdk, project_uuid - ) - assert current_version == expected_version diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py index b707adc01e0..086a3039cf7 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py @@ -48,7 +48,7 @@ from servicelib.redis import ( PROJECT_DB_UPDATE_REDIS_LOCK_KEY, exclusive, - get_and_increment_project_document_version, + increment_and_return_project_document_version, ) from simcore_postgres_database.aiopg_errors import UniqueViolation from simcore_postgres_database.models.groups import user_to_groups @@ -1001,7 +1001,7 @@ async def _update_workbench_and_notify() -> ( # Increment document version and notify users redis_client_sdk = get_redis_document_manager_client_sdk(self._app) - document_version = await get_and_increment_project_document_version( + document_version = await increment_and_return_project_document_version( redis_client=redis_client_sdk, project_uuid=project_uuid ) await notify_project_document_updated( diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 80bacf30b1a..36a00bcea34 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -86,8 +86,8 @@ from servicelib.redis import ( PROJECT_DB_UPDATE_REDIS_LOCK_KEY, exclusive, - get_and_increment_project_document_version, get_project_locked_state, + increment_and_return_project_document_version, is_project_locked, with_project_locked, ) @@ -232,7 +232,7 @@ async def _patch_and_notify() -> None: ), ) redis_client_sdk = get_redis_document_manager_client_sdk(app) - document_version = await get_and_increment_project_document_version( + document_version = await increment_and_return_project_document_version( redis_client=redis_client_sdk, project_uuid=project_uuid ) await notify_project_document_updated( diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py b/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py index 82359b09fe3..03e2e10e784 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py @@ -20,7 +20,7 @@ from models_library.projects import ProjectID from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status -from servicelib.redis import get_and_increment_project_document_version +from servicelib.redis import increment_and_return_project_document_version from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects._projects_service import ( patch_project_and_notify_users, @@ -73,7 +73,7 @@ async def test_patch_project_and_notify_users_sequential( # Get version after first patch redis_client = 
get_redis_document_manager_client_sdk(client.app) - version_1 = await get_and_increment_project_document_version( + version_1 = await increment_and_return_project_document_version( redis_client=redis_client, project_uuid=project_uuid ) @@ -86,7 +86,7 @@ async def test_patch_project_and_notify_users_sequential( ) # Get version after second patch - version_2 = await get_and_increment_project_document_version( + version_2 = await increment_and_return_project_document_version( redis_client=redis_client, project_uuid=project_uuid ) @@ -115,7 +115,7 @@ async def test_patch_project_and_notify_users_concurrent_locking( # Get initial version redis_client = get_redis_document_manager_client_sdk(client.app) - initial_version = await get_and_increment_project_document_version( + initial_version = await increment_and_return_project_document_version( redis_client=redis_client, project_uuid=project_uuid ) @@ -134,7 +134,7 @@ async def test_patch_project_and_notify_users_concurrent_locking( await asyncio.gather(*tasks) # Get final version - final_version = await get_and_increment_project_document_version( + final_version = await increment_and_return_project_document_version( redis_client=redis_client, project_uuid=project_uuid ) @@ -169,13 +169,13 @@ async def test_patch_project_and_notify_users_concurrent_different_projects( redis_client = get_redis_document_manager_client_sdk(client.app) # Get initial versions - initial_version_1 = await get_and_increment_project_document_version( + initial_version_1 = await increment_and_return_project_document_version( redis_client=redis_client, project_uuid=project_uuid_1 ) - initial_version_2 = await get_and_increment_project_document_version( + initial_version_2 = await increment_and_return_project_document_version( redis_client=redis_client, project_uuid=project_uuid_2 ) - initial_version_3 = await get_and_increment_project_document_version( + initial_version_3 = await increment_and_return_project_document_version( redis_client=redis_client, project_uuid=project_uuid_3 ) @@ -192,13 +192,13 @@ async def test_patch_project_and_notify_users_concurrent_different_projects( ) # Get final versions - final_version_1 = await get_and_increment_project_document_version( + final_version_1 = await increment_and_return_project_document_version( redis_client=redis_client, project_uuid=project_uuid_1 ) - final_version_2 = await get_and_increment_project_document_version( + final_version_2 = await increment_and_return_project_document_version( redis_client=redis_client, project_uuid=project_uuid_2 ) - final_version_3 = await get_and_increment_project_document_version( + final_version_3 = await increment_and_return_project_document_version( redis_client=redis_client, project_uuid=project_uuid_3 ) From 8ba5ce7b3fba5227db4f1a5a7e78d95f384df24b Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 13:09:45 +0200 Subject: [PATCH 18/35] fix failing tests --- .../server/tests/integration/01/test_garbage_collection.py | 5 +++-- .../server/tests/unit/with_dbs/03/trash/test_trash_rest.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/services/web/server/tests/integration/01/test_garbage_collection.py b/services/web/server/tests/integration/01/test_garbage_collection.py index 7e28b90e789..73861583fe5 100644 --- a/services/web/server/tests/integration/01/test_garbage_collection.py +++ b/services/web/server/tests/integration/01/test_garbage_collection.py @@ -1172,7 +1172,8 @@ async def test_t11_owner_and_all_users_in_group_marked_as_guests( await 
assert_projects_count(aiopg_engine, 1) await assert_user_is_owner_of_project(aiopg_engine, u1, project) - await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + # await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + await gc_core.collect_garbage(app=client.app) - await assert_users_count(aiopg_engine, 0) + await assert_users_count(aiopg_engine, 0) # <-- MD: this is where the test fails await assert_projects_count(aiopg_engine, 0) diff --git a/services/web/server/tests/unit/with_dbs/03/trash/test_trash_rest.py b/services/web/server/tests/unit/with_dbs/03/trash/test_trash_rest.py index efdd44ed32a..7c44f939cd8 100644 --- a/services/web/server/tests/unit/with_dbs/03/trash/test_trash_rest.py +++ b/services/web/server/tests/unit/with_dbs/03/trash/test_trash_rest.py @@ -1162,7 +1162,7 @@ async def test_trash_folder_with_subfolder_and_project_and_empty_bin( # waits for deletion async for attempt in AsyncRetrying( - stop=stop_after_attempt(3), wait=wait_fixed(1), reraise=True + stop=stop_after_attempt(10), wait=wait_fixed(1), reraise=True ): with attempt: # GET trashed parent folder From 92af8304f806815144c52873db84f1024b11ddf5 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 13:26:27 +0200 Subject: [PATCH 19/35] Fix test_open_project_more_than_limitation_of_max_studies_open_per_user by adding websocket connections - Add _connect_websocket calls before _open_project calls - Convert status codes to HTTPStatus enum values for type compatibility - Follows the same pattern as other successful tests in the file --- .../02/test_projects_states_handlers.py | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py index 91137d4f88f..fcf130861da 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py @@ -848,6 +848,7 @@ async def test_open_project_more_than_limitation_of_max_studies_open_per_user( client: TestClient, logged_user, client_session_id_factory: Callable, + socketio_client_factory: Callable, user_project: ProjectDict, shared_project: ProjectDict, expected: ExpectedResponse, @@ -857,19 +858,35 @@ async def test_open_project_more_than_limitation_of_max_studies_open_per_user( mocked_notifications_plugin: dict[str, mock.Mock], ): client_id_1 = client_session_id_factory() + await _connect_websocket( + socketio_client_factory, + user_role != UserRole.ANONYMOUS, + client, + client_id_1, + ) await _open_project( client, client_id_1, user_project, - expected.ok if user_role != UserRole.GUEST else status.HTTP_200_OK, + HTTPStatus(expected.ok) if user_role != UserRole.GUEST else HTTPStatus.OK, ) client_id_2 = client_session_id_factory() + await _connect_websocket( + socketio_client_factory, + user_role != UserRole.ANONYMOUS, + client, + client_id_2, + ) await _open_project( client, client_id_2, shared_project, - expected.conflict if user_role != UserRole.GUEST else status.HTTP_409_CONFLICT, + ( + HTTPStatus(expected.conflict) + if user_role != UserRole.GUEST + else HTTPStatus.CONFLICT + ), ) From cc9db9a49b8a9e0b8c51036c700bd77028bfbab1 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 13:49:11 +0200 Subject: [PATCH 20/35] fix failing tests --- .../02/test_projects_states_handlers.py | 91 +++++++++++++++++-- 1 file changed, 82 insertions(+), 9 deletions(-) diff --git 
a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py index fcf130861da..4173fd2a328 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py @@ -357,6 +357,7 @@ async def test_open_project( logged_user: UserInfoDict, user_project: ProjectDict, client_session_id_factory: Callable[[], str], + socketio_client_factory: Callable, expected: HTTPStatus, save_state: bool, mocked_dynamic_services_interface: dict[str, mock.Mock], @@ -370,8 +371,15 @@ async def test_open_project( # POST /v0/projects/{project_id}:open # open project assert client.app + client_id = client_session_id_factory() + await _connect_websocket( + socketio_client_factory, + expected != status.HTTP_401_UNAUTHORIZED, + client, + client_id, + ) url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected) @@ -440,6 +448,7 @@ async def test_open_project__in_debt( logged_user: UserInfoDict, user_project: ProjectDict, client_session_id_factory: Callable[[], str], + socketio_client_factory: Callable, expected: HTTPStatus, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_service_resources: ServiceResourcesDict, @@ -477,8 +486,15 @@ async def test_open_project__in_debt( # POST /v0/projects/{project_id}:open assert client.app + client_id = client_session_id_factory() + await _connect_websocket( + socketio_client_factory, + check_connection=True, # USER role always connects + client=client, + client_id=client_id, + ) url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected) assert mock_get_project_wallet_total_credits.assert_called_once @@ -498,6 +514,7 @@ async def test_open_template_project_for_edition( logged_user: UserInfoDict, create_template_project: Callable[..., Awaitable[ProjectDict]], client_session_id_factory: Callable[[], str], + socketio_client_factory: Callable, expected: HTTPStatus, save_state: bool, mocked_dynamic_services_interface: dict[str, mock.Mock], @@ -517,8 +534,15 @@ async def test_open_template_project_for_edition( logged_user["primary_gid"]: {"read": True, "write": True, "delete": False} } ) + client_id = client_session_id_factory() + await _connect_websocket( + socketio_client_factory, + check_connection=expected != status.HTTP_401_UNAUTHORIZED, + client=client, + client_id=client_id, + ) url = client.app.router["open_project"].url_for(project_id=template_project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected) if resp.status == status.HTTP_200_OK: @@ -579,6 +603,7 @@ async def test_open_template_project_for_edition_with_missing_write_rights( logged_user: UserInfoDict, create_template_project: Callable[..., Awaitable[ProjectDict]], client_session_id_factory: Callable[[], str], + socketio_client_factory: Callable, expected: HTTPStatus, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_service_resources: ServiceResourcesDict, @@ -594,8 +619,15 @@ async def 
test_open_template_project_for_edition_with_missing_write_rights( logged_user["primary_gid"]: {"read": True, "write": False, "delete": True} } ) + client_id = client_session_id_factory() + await _connect_websocket( + socketio_client_factory, + check_connection=expected != status.HTTP_401_UNAUTHORIZED, + client=client, + client_id=client_id, + ) url = client.app.router["open_project"].url_for(project_id=template_project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected) @@ -611,6 +643,7 @@ async def test_open_project_with_small_amount_of_dynamic_services_starts_them_au logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], client_session_id_factory: Callable, + socketio_client_factory: Callable, expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -636,8 +669,15 @@ async def test_open_project_with_small_amount_of_dynamic_services_starts_them_au for service_id in range(num_service_already_running) ] + client_id = client_session_id_factory() + await _connect_websocket( + socketio_client_factory, + check_connection=True, # standard_user_role is always USER or TESTER + client=client, + client_id=client_id, + ) url = client.app.router["open_project"].url_for(project_id=project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(project["uuid"]) @@ -657,6 +697,7 @@ async def test_open_project_with_disable_service_auto_start_set_overrides_behavi logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], client_session_id_factory: Callable, + socketio_client_factory: Callable, expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -678,14 +719,22 @@ async def test_open_project_with_disable_service_auto_start_set_overrides_behavi for service_id in range(num_service_already_running) ] + client_id = client_session_id_factory() + sio = await _connect_websocket( + socketio_client_factory, + check_connection=True, # standard_user_role is always USER or TESTER + client=client, + client_id=client_id, + ) url = ( client.app.router["open_project"] .url_for(project_id=project["uuid"]) .with_query(disable_service_auto_start=f"{True}") ) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) + await sio.disconnect() mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(project["uuid"]) ) @@ -701,6 +750,7 @@ async def test_open_project_with_large_amount_of_dynamic_services_does_not_start logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], client_session_id_factory: Callable, + socketio_client_factory: Callable, expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -728,8 +778,15 @@ async def test_open_project_with_large_amount_of_dynamic_services_does_not_start for service_id in range(num_service_already_running) ] + client_id = client_session_id_factory() + await 
_connect_websocket( + socketio_client_factory, + check_connection=True, # standard_user_role is always USER or TESTER + client=client, + client_id=client_id, + ) url = client.app.router["open_project"].url_for(project_id=project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(project["uuid"]) @@ -748,6 +805,7 @@ async def test_open_project_with_large_amount_of_dynamic_services_starts_them_if logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], client_session_id_factory: Callable, + socketio_client_factory: Callable, expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -778,8 +836,15 @@ async def test_open_project_with_large_amount_of_dynamic_services_starts_them_if for service_id in range(num_service_already_running) ] + client_id = client_session_id_factory() + await _connect_websocket( + socketio_client_factory, + check_connection=True, # standard_user_role is always USER or TESTER + client=client, + client_id=client_id, + ) url = client.app.router["open_project"].url_for(project_id=project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(project["uuid"]) @@ -796,6 +861,7 @@ async def test_open_project_with_deprecated_services_ok_but_does_not_start_dynam logged_user, user_project, client_session_id_factory: Callable, + socketio_client_factory: Callable, expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_service_resources: ServiceResourcesDict, @@ -806,8 +872,15 @@ async def test_open_project_with_deprecated_services_ok_but_does_not_start_dynam mock_catalog_api["get_service"].return_value["deprecated"] = ( datetime.now(UTC) - timedelta(days=1) ).isoformat() + client_id = client_session_id_factory() + await _connect_websocket( + socketio_client_factory, + check_connection=True, # standard_user_role is always USER or TESTER + client=client, + client_id=client_id, + ) url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) - resp = await client.post(url, json=client_session_id_factory()) + resp = await client.post(url, json=client_id) await assert_status(resp, expected.ok) mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(user_project["uuid"]) From e9b432a57d24885035ab9d8a9e42d2438eb4f6cf Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 14:28:17 +0200 Subject: [PATCH 21/35] fix issues after resolving conflicts --- .../projects/_projects_service.py | 95 +++++++++++++++++-- 1 file changed, 88 insertions(+), 7 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 246a3eb818c..57fe286d84b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -29,15 +29,17 @@ DynamicServiceStart, DynamicServiceStop, ) -from models_library.api_schemas_webserver.projects import 
ProjectGet, ProjectPatch +from models_library.api_schemas_webserver.projects import ( + ProjectDocument, + ProjectGet, + ProjectPatch, +) from models_library.basic_types import KeyIDStr from models_library.errors import ErrorDict from models_library.groups import GroupID from models_library.products import ProductName -from models_library.projects import ( - Project, - ProjectID, -) +from models_library.projects import Project, ProjectID, ProjectTemplateType +from models_library.projects import ProjectType as ProjectTypeAPI from models_library.projects_access import Owner from models_library.projects_nodes import Node, NodeState, PartialNode from models_library.projects_nodes_io import NodeID, NodeIDStr, PortLink @@ -82,8 +84,10 @@ ServiceWasNotFoundError, ) from servicelib.redis import ( + PROJECT_DB_UPDATE_REDIS_LOCK_KEY, exclusive, get_project_locked_state, + increment_and_return_project_document_version, is_project_locked, with_project_locked, ) @@ -103,6 +107,7 @@ from ..products import products_web from ..rabbitmq import get_rabbitmq_rpc_client from ..redis import ( + get_redis_document_manager_client_sdk, get_redis_lock_manager_client_sdk, ) from ..resource_manager.user_sessions import ( @@ -142,6 +147,7 @@ from ._nodes_utils import set_reservation_same_as_limit, validate_new_service_resources from ._projects_repository_legacy import APP_PROJECT_DBAPI, ProjectDBAPI from ._projects_repository_legacy_utils import PermissionStr +from ._socketio import notify_project_document_updated from .exceptions import ( ClustersKeeperNotAvailableError, DefaultPricingUnitNotFoundError, @@ -169,6 +175,81 @@ PROJECT_REDIS_LOCK_KEY: str = "project:{}" +async def patch_project_and_notify_users( + app: web.Application, + *, + project_uuid: ProjectID, + patch_project_data: dict[str, Any], + user_primary_gid: GroupID, +) -> None: + """ + Patches a project and notifies users involved in the project with version control. + + This function performs the following operations atomically: + 1. Patches the project in the database + 2. Retrieves the updated project with workbench + 3. Creates a project document + 4. Increments the document version + 5. Notifies users about the project update + + Args: + app: The web application instance + project_uuid: The project UUID to patch + patch_project_data: Dictionary containing the project data to patch + user_primary_gid: Primary group ID of the user making the change + + Note: + This function is decorated with Redis exclusive lock to ensure + thread-safe operations on the project document. 
+ """ + + @exclusive( + get_redis_lock_manager_client_sdk(app), + lock_key=PROJECT_DB_UPDATE_REDIS_LOCK_KEY.format(project_uuid), + blocking=True, + blocking_timeout=datetime.timedelta(seconds=30), + ) + async def _patch_and_notify() -> None: + await _projects_repository.patch_project( + app=app, + project_uuid=project_uuid, + new_partial_project_data=patch_project_data, + ) + project_with_workbench = await _projects_repository.get_project_with_workbench( + app=app, project_uuid=project_uuid + ) + project_document = ProjectDocument( + uuid=project_with_workbench.uuid, + workspace_id=project_with_workbench.workspace_id, + name=project_with_workbench.name, + description=project_with_workbench.description, + thumbnail=project_with_workbench.thumbnail, + last_change_date=project_with_workbench.last_change_date, + classifiers=project_with_workbench.classifiers, + dev=project_with_workbench.dev, + quality=project_with_workbench.quality, + workbench=project_with_workbench.workbench, + ui=project_with_workbench.ui, + type=cast(ProjectTypeAPI, project_with_workbench.type), + template_type=cast( + ProjectTemplateType, project_with_workbench.template_type + ), + ) + redis_client_sdk = get_redis_document_manager_client_sdk(app) + document_version = await increment_and_return_project_document_version( + redis_client=redis_client_sdk, project_uuid=project_uuid + ) + await notify_project_document_updated( + app=app, + project_id=project_uuid, + user_primary_gid=user_primary_gid, + version=document_version, + document=project_document, + ) + + await _patch_and_notify() + + def _is_node_dynamic(node_key: str) -> bool: return "/dynamic/" in node_key @@ -349,10 +430,10 @@ async def patch_project_for_user( # 5. If patching template type if new_template_type := patch_project_data.get("template_type"): - # 4.1 Check if user is a tester + # 5.1 Check if user is a tester if UserRole(current_user["role"]) < UserRole.TESTER: raise InsufficientRoleForProjectTemplateTypeUpdateError - # 4.2 Check the compatibility of the template type with the project + # 5.2 Check the compatibility of the template type with the project if project_db.type == ProjectType.STANDARD and new_template_type is not None: raise ProjectTypeAndTemplateIncompatibilityError( project_uuid=project_uuid, From 4b845df638381e6fbbde7846e4644e2c9becaac9 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 15:19:17 +0200 Subject: [PATCH 22/35] Refactors socket connection setup in project state tests Replaces direct websocket connection calls with a unified helper for setting up socket connections, improving test readability and maintainability. Ensures connections are only created for authorized or non-anonymous users, reducing redundant code. 
--- .../02/test_projects_states_handlers.py | 153 +++++++++--------- 1 file changed, 76 insertions(+), 77 deletions(-) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py index 6f38402d101..9416093746a 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py @@ -479,7 +479,9 @@ async def test_open_project( logged_user: UserInfoDict, user_project: ProjectDict, client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [TestClient, str], Awaitable[tuple[socketio.AsyncClient, _SocketHandlers]] + ], expected: HTTPStatus, save_state: bool, mocked_dynamic_services_interface: dict[str, mock.Mock], @@ -494,12 +496,11 @@ async def test_open_project( # open project assert client.app client_id = client_session_id_factory() - await _connect_websocket( - socketio_client_factory, - expected != status.HTTP_401_UNAUTHORIZED, - client, - client_id, - ) + + # Only create socketio connection for non-anonymous users + if expected != status.HTTP_401_UNAUTHORIZED: + await create_socketio_connection_with_handlers(client, client_id) + url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) resp = await client.post(f"{url}", json=client_id) @@ -571,7 +572,10 @@ async def test_open_project__in_debt( logged_user: UserInfoDict, user_project: ProjectDict, client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, + # socketio_client_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [TestClient, str], Awaitable[tuple[socketio.AsyncClient, _SocketHandlers]] + ], expected: HTTPStatus, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_service_resources: ServiceResourcesDict, @@ -610,12 +614,9 @@ async def test_open_project__in_debt( # POST /v0/projects/{project_id}:open assert client.app client_id = client_session_id_factory() - await _connect_websocket( - socketio_client_factory, - check_connection=True, # USER role always connects - client=client, - client_id=client_id, - ) + + await create_socketio_connection_with_handlers(client, client_id) + url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected) @@ -637,7 +638,9 @@ async def test_open_template_project_for_edition( logged_user: UserInfoDict, create_template_project: Callable[..., Awaitable[ProjectDict]], client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [TestClient, str], Awaitable[tuple[socketio.AsyncClient, _SocketHandlers]] + ], expected: HTTPStatus, save_state: bool, mocked_dynamic_services_interface: dict[str, mock.Mock], @@ -658,12 +661,10 @@ async def test_open_template_project_for_edition( } ) client_id = client_session_id_factory() - await _connect_websocket( - socketio_client_factory, - check_connection=expected != status.HTTP_401_UNAUTHORIZED, - client=client, - client_id=client_id, - ) + + # Only create socketio connection for non-anonymous users + if expected != status.HTTP_401_UNAUTHORIZED: + await create_socketio_connection_with_handlers(client, client_id) url = client.app.router["open_project"].url_for(project_id=template_project["uuid"]) resp = await 
client.post(f"{url}", json=client_id) await assert_status(resp, expected) @@ -724,7 +725,9 @@ async def test_open_template_project_for_edition_with_missing_write_rights( logged_user: UserInfoDict, create_template_project: Callable[..., Awaitable[ProjectDict]], client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [TestClient, str], Awaitable[tuple[socketio.AsyncClient, _SocketHandlers]] + ], expected: HTTPStatus, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_service_resources: ServiceResourcesDict, @@ -741,12 +744,10 @@ async def test_open_template_project_for_edition_with_missing_write_rights( } ) client_id = client_session_id_factory() - await _connect_websocket( - socketio_client_factory, - check_connection=expected != status.HTTP_401_UNAUTHORIZED, - client=client, - client_id=client_id, - ) + + # Only create socketio connection for non-anonymous users + if expected != status.HTTP_401_UNAUTHORIZED: + await create_socketio_connection_with_handlers(client, client_id) url = client.app.router["open_project"].url_for(project_id=template_project["uuid"]) resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected) @@ -758,7 +759,9 @@ async def test_open_project_with_small_amount_of_dynamic_services_starts_them_au logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [TestClient, str], Awaitable[tuple[socketio.AsyncClient, _SocketHandlers]] + ], expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -785,12 +788,9 @@ async def test_open_project_with_small_amount_of_dynamic_services_starts_them_au ] client_id = client_session_id_factory() - await _connect_websocket( - socketio_client_factory, - check_connection=True, # standard_user_role is always USER or TESTER - client=client, - client_id=client_id, - ) + # Only create socketio connection for non-anonymous users + if expected.ok: + await create_socketio_connection_with_handlers(client, client_id) url = client.app.router["open_project"].url_for(project_id=project["uuid"]) resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) @@ -812,7 +812,9 @@ async def test_open_project_with_disable_service_auto_start_set_overrides_behavi logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [TestClient, str], Awaitable[tuple[socketio.AsyncClient, _SocketHandlers]] + ], expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -833,12 +835,9 @@ async def test_open_project_with_disable_service_auto_start_set_overrides_behavi ] client_id = client_session_id_factory() - sio = await _connect_websocket( - socketio_client_factory, - check_connection=True, # standard_user_role is always USER or TESTER - client=client, - client_id=client_id, - ) + # Only create socketio connection for non-anonymous users + if expected.ok: + sio = await create_socketio_connection_with_handlers(client, client_id) url = ( client.app.router["open_project"] .url_for(project_id=project["uuid"]) @@ -847,7 
+846,8 @@ async def test_open_project_with_disable_service_auto_start_set_overrides_behavi resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) - await sio.disconnect() + if expected.ok: + await sio[0].disconnect() mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(project["uuid"]) ) @@ -863,7 +863,9 @@ async def test_open_project_with_large_amount_of_dynamic_services_does_not_start logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [TestClient, str], Awaitable[tuple[socketio.AsyncClient, _SocketHandlers]] + ], expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -892,12 +894,9 @@ async def test_open_project_with_large_amount_of_dynamic_services_does_not_start ] client_id = client_session_id_factory() - await _connect_websocket( - socketio_client_factory, - check_connection=True, # standard_user_role is always USER or TESTER - client=client, - client_id=client_id, - ) + # Only create socketio connection for non-anonymous users + if expected.ok: + await create_socketio_connection_with_handlers(client, client_id) url = client.app.router["open_project"].url_for(project_id=project["uuid"]) resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) @@ -918,7 +917,9 @@ async def test_open_project_with_large_amount_of_dynamic_services_starts_them_if logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [TestClient, str], Awaitable[tuple[socketio.AsyncClient, _SocketHandlers]] + ], expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -950,12 +951,9 @@ async def test_open_project_with_large_amount_of_dynamic_services_starts_them_if ] client_id = client_session_id_factory() - await _connect_websocket( - socketio_client_factory, - check_connection=True, # standard_user_role is always USER or TESTER - client=client, - client_id=client_id, - ) + # Only create socketio connection for non-anonymous users + if expected.ok: + await create_socketio_connection_with_handlers(client, client_id) url = client.app.router["open_project"].url_for(project_id=project["uuid"]) resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) @@ -974,7 +972,9 @@ async def test_open_project_with_deprecated_services_ok_but_does_not_start_dynam logged_user, user_project, client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [TestClient, str], Awaitable[tuple[socketio.AsyncClient, _SocketHandlers]] + ], expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_service_resources: ServiceResourcesDict, @@ -986,12 +986,9 @@ async def test_open_project_with_deprecated_services_ok_but_does_not_start_dynam datetime.now(UTC) - timedelta(days=1) ).isoformat() client_id = client_session_id_factory() - await _connect_websocket( - socketio_client_factory, - check_connection=True, # standard_user_role is always USER or TESTER - client=client, - client_id=client_id, 
- ) + # Only create socketio connection for non-anonymous users + if expected.ok: + await create_socketio_connection_with_handlers(client, client_id) url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) resp = await client.post(url, json=client_id) await assert_status(resp, expected.ok) @@ -1034,7 +1031,9 @@ async def test_open_project_more_than_limitation_of_max_studies_open_per_user( client: TestClient, logged_user, client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [TestClient, str], Awaitable[tuple[socketio.AsyncClient, _SocketHandlers]] + ], user_project: ProjectDict, shared_project: ProjectDict, expected: ExpectedResponse, @@ -1044,12 +1043,12 @@ async def test_open_project_more_than_limitation_of_max_studies_open_per_user( mocked_notifications_plugin: dict[str, mock.Mock], ): client_id_1 = client_session_id_factory() - await _connect_websocket( - socketio_client_factory, - user_role != UserRole.ANONYMOUS, - client, - client_id_1, - ) + # Only create socketio connection for non-anonymous users + if user_role != UserRole.ANONYMOUS: + await create_socketio_connection_with_handlers( + client, + client_id_1, + ) await _open_project( client, client_id_1, @@ -1058,12 +1057,12 @@ async def test_open_project_more_than_limitation_of_max_studies_open_per_user( ) client_id_2 = client_session_id_factory() - await _connect_websocket( - socketio_client_factory, - user_role != UserRole.ANONYMOUS, - client, - client_id_2, - ) + # Only create socketio connection for non-anonymous users + if user_role != UserRole.ANONYMOUS: + await create_socketio_connection_with_handlers( + client, + client_id_2, + ) await _open_project( client, client_id_2, From 369d6d7827333f7e4407afeec51579df85f54033 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 15:21:36 +0200 Subject: [PATCH 23/35] generate open api specs --- .../api/v0/openapi.yaml | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 828ddccef48..ad95be62960 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -17161,6 +17161,17 @@ components: const: hypertool - type: 'null' title: Templatetype + mode: + anyOf: + - type: string + enum: + - workbench + - app + - guided + - standalone + - pipeline + - type: 'null' + title: Mode additionalProperties: true type: object title: StudyUI @@ -17201,6 +17212,17 @@ components: const: hypertool - type: 'null' title: Templatetype + mode: + anyOf: + - type: string + enum: + - workbench + - app + - guided + - standalone + - pipeline + - type: 'null' + title: Mode additionalProperties: true type: object title: StudyUI From 88c88fbefa9f064569109bd5afb5829d4dfd41d5 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 15:32:07 +0200 Subject: [PATCH 24/35] review @pcrespov --- .../projects/_projects_repository_legacy.py | 2 +- .../src/simcore_service_webserver/projects/_projects_service.py | 2 +- .../projects/{_socketio.py => _socketio_service.py} | 0 3 files changed, 2 insertions(+), 2 deletions(-) rename services/web/server/src/simcore_service_webserver/projects/{_socketio.py => _socketio_service.py} (100%) diff --git 
a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py index 086a3039cf7..74c062f6589 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py @@ -107,7 +107,7 @@ create_project_access_rights, patch_workbench, ) -from ._socketio import notify_project_document_updated +from ._socketio_service import notify_project_document_updated from .exceptions import ( ProjectDeleteError, ProjectInvalidRightsError, diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 57fe286d84b..5e9a90d8af0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -147,7 +147,7 @@ from ._nodes_utils import set_reservation_same_as_limit, validate_new_service_resources from ._projects_repository_legacy import APP_PROJECT_DBAPI, ProjectDBAPI from ._projects_repository_legacy_utils import PermissionStr -from ._socketio import notify_project_document_updated +from ._socketio_service import notify_project_document_updated from .exceptions import ( ClustersKeeperNotAvailableError, DefaultPricingUnitNotFoundError, diff --git a/services/web/server/src/simcore_service_webserver/projects/_socketio.py b/services/web/server/src/simcore_service_webserver/projects/_socketio_service.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/projects/_socketio.py rename to services/web/server/src/simcore_service_webserver/projects/_socketio_service.py From 2f2af7745fa091990fd51cc0dd0a724545bf2740 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 15:46:30 +0200 Subject: [PATCH 25/35] minor --- packages/models-library/src/models_library/projects_nodes.py | 2 +- .../simcore_service_webserver/projects/_socketio_service.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py index 82d9764e1e7..d7db50cf172 100644 --- a/packages/models-library/src/models_library/projects_nodes.py +++ b/packages/models-library/src/models_library/projects_nodes.py @@ -164,7 +164,7 @@ class Node(BaseModel): ge=0, le=100, description="the node progress value (deprecated in DB, still used for API only)", - deprecated=True, # <-- I think this is not true, it is still used by the File Picker (frontend node) + deprecated=True, # <-- Think this is not true, it is still used by the File Picker (frontend nodes) ), ] = None diff --git a/services/web/server/src/simcore_service_webserver/projects/_socketio_service.py b/services/web/server/src/simcore_service_webserver/projects/_socketio_service.py index deaea1975fb..be1322f35dd 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_socketio_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_socketio_service.py @@ -2,6 +2,7 @@ from aiohttp import web from models_library.api_schemas_webserver.projects import ProjectDocument +from models_library.groups import GroupID from models_library.projects import ProjectID from models_library.socketio import SocketMessageDict from pydantic 
import AliasGenerator, BaseModel, ConfigDict @@ -24,7 +25,7 @@ class BaseEvent(BaseModel): class ProjectDocumentEvent(BaseEvent): project_id: ProjectID - user_primary_gid: int + user_primary_gid: GroupID version: int document: ProjectDocument @@ -33,7 +34,7 @@ async def notify_project_document_updated( app: web.Application, *, project_id: ProjectID, - user_primary_gid: int, + user_primary_gid: GroupID, version: int, document: ProjectDocument, ) -> None: From e43e858e6c9a9f8434ef2eb84be5f1f7f0138883 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 15:51:00 +0200 Subject: [PATCH 26/35] review @sanderegg --- .../projects/_controller/projects_states_rest.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py index 83b30cada86..7c9ff0bcb0f 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py @@ -3,6 +3,7 @@ import logging from aiohttp import web +from common_library.user_messages import user_message from models_library.api_schemas_webserver.projects import ProjectGet from models_library.api_schemas_webserver.socketio import SocketIORoomStr from pydantic import BaseModel @@ -121,8 +122,11 @@ async def open_project(request: web.Request) -> web.Response: ) as resource_registry: _socket_id = await resource_registry.get_socket_id() if _socket_id is None: - raise web.HTTPBadRequest( - text="Cannot open project without a socket_id, please refresh the page" + raise web.HTTPUnprocessableEntity( + text=user_message( + "Data corruption detected: unable to identify your session (socket_id missing). " + "Please refresh the page and try again. If the problem persists, contact support." + ) ) sio = get_socket_server(request.app) sio.enter_room( From 22df4d8342c0ac61aba76208d0de135b9af5ca11 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 17:54:02 +0200 Subject: [PATCH 27/35] Improves logging and test debug visibility Adds detailed debug logging to lock acquisition, enhances pytest output with DEBUG logs for CI, and clarifies mock subsystem setup for test reliability. Facilitates easier diagnosis of concurrency and storage issues. --- .../src/servicelib/redis/_decorators.py | 13 ++++++++++++- scripts/common-service.Makefile | 1 + .../projects/_projects_service.py | 2 +- services/web/server/tests/unit/with_dbs/conftest.py | 7 ++++++- 4 files changed, 20 insertions(+), 3 deletions(-) diff --git a/packages/service-library/src/servicelib/redis/_decorators.py b/packages/service-library/src/servicelib/redis/_decorators.py index b70bb931432..41510be69c5 100644 --- a/packages/service-library/src/servicelib/redis/_decorators.py +++ b/packages/service-library/src/servicelib/redis/_decorators.py @@ -53,6 +53,10 @@ def exclusive( lock_key -- a string as the name of the lock (good practice: app_name:lock_name) lock_value -- some additional data that can be retrieved by another client if None, it will be automatically filled with the current time and the client name + blocking -- If ``blocking`` is False, always return immediately. If the lock + was acquired, return True, otherwise return False. + blocking_timeout -- specifies the maximum number of seconds to + wait trying to acquire the lock. 
Raises: - ValueError if used incorrectly @@ -85,6 +89,13 @@ async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: lock_value = f"locked since {arrow.utcnow().format()} by {client.client_name} on {socket.gethostname()}" lock = client.create_lock(redis_lock_key, ttl=DEFAULT_LOCK_TTL) + _logger.debug( + "Acquiring lock '%s' with value '%s' for coroutine '%s'", + redis_lock_key, + lock_value, + coro.__name__, + stacklevel=3, + ) if not await lock.acquire( token=lock_value, blocking=blocking, @@ -92,7 +103,7 @@ async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: blocking_timeout.total_seconds() if blocking_timeout else None ), ): - raise CouldNotAcquireLockError(lock=lock) + raise CouldNotAcquireLockError(lock=lock) # <-- HERE try: async with asyncio.TaskGroup() as tg: diff --git a/scripts/common-service.Makefile b/scripts/common-service.Makefile index 57fb6e3b5b4..53ed164f95e 100644 --- a/scripts/common-service.Makefile +++ b/scripts/common-service.Makefile @@ -175,6 +175,7 @@ _run-test-ci: _check_venv_active --log-date-format="%Y-%m-%d %H:%M:%S" \ --log-format="%(asctime)s %(levelname)s %(message)s" \ --verbose \ + --log-cli-level=DEBUG \ -m "not heavy_load" \ $(PYTEST_ADDITIONAL_PARAMETERS) \ $(TEST_TARGET) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 5e9a90d8af0..96bec0e2305 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -1060,7 +1060,7 @@ async def _remove_service_and_its_data_folders( ) # remove the node's data if any - await storage_service.delete_data_folders_of_project_node( + await storage_service.delete_data_folders_of_project_node( # <-- MD: this one app, f"{project_uuid}", node_uuid, user_id ) diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index 5705c4b95ca..13b967d78e3 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -417,7 +417,12 @@ async def _mock_result() -> None: return_value=TypeAdapter(ByteSize).validate_python("1Gib"), ) - return MockedStorageSubsystem(mock, mock1, mock2, mock3) + return MockedStorageSubsystem( + copy_data_folders_from_project=mock, + delete_project=mock1, + delete_node=mock2, + get_project_total_size_simcore_s3=mock3, + ) @pytest.fixture From 5af742657c391331cb1d6a3578318c5f5e66e917 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Wed, 23 Jul 2025 19:57:22 +0200 Subject: [PATCH 28/35] Revert "Improves logging and test debug visibility" This reverts commit 22df4d8342c0ac61aba76208d0de135b9af5ca11. 
--- .../src/servicelib/redis/_decorators.py | 13 +------------ scripts/common-service.Makefile | 1 - .../projects/_projects_service.py | 2 +- services/web/server/tests/unit/with_dbs/conftest.py | 7 +------ 4 files changed, 3 insertions(+), 20 deletions(-) diff --git a/packages/service-library/src/servicelib/redis/_decorators.py b/packages/service-library/src/servicelib/redis/_decorators.py index 41510be69c5..b70bb931432 100644 --- a/packages/service-library/src/servicelib/redis/_decorators.py +++ b/packages/service-library/src/servicelib/redis/_decorators.py @@ -53,10 +53,6 @@ def exclusive( lock_key -- a string as the name of the lock (good practice: app_name:lock_name) lock_value -- some additional data that can be retrieved by another client if None, it will be automatically filled with the current time and the client name - blocking -- If ``blocking`` is False, always return immediately. If the lock - was acquired, return True, otherwise return False. - blocking_timeout -- specifies the maximum number of seconds to - wait trying to acquire the lock. Raises: - ValueError if used incorrectly @@ -89,13 +85,6 @@ async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: lock_value = f"locked since {arrow.utcnow().format()} by {client.client_name} on {socket.gethostname()}" lock = client.create_lock(redis_lock_key, ttl=DEFAULT_LOCK_TTL) - _logger.debug( - "Acquiring lock '%s' with value '%s' for coroutine '%s'", - redis_lock_key, - lock_value, - coro.__name__, - stacklevel=3, - ) if not await lock.acquire( token=lock_value, blocking=blocking, @@ -103,7 +92,7 @@ async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: blocking_timeout.total_seconds() if blocking_timeout else None ), ): - raise CouldNotAcquireLockError(lock=lock) # <-- HERE + raise CouldNotAcquireLockError(lock=lock) try: async with asyncio.TaskGroup() as tg: diff --git a/scripts/common-service.Makefile b/scripts/common-service.Makefile index 53ed164f95e..57fb6e3b5b4 100644 --- a/scripts/common-service.Makefile +++ b/scripts/common-service.Makefile @@ -175,7 +175,6 @@ _run-test-ci: _check_venv_active --log-date-format="%Y-%m-%d %H:%M:%S" \ --log-format="%(asctime)s %(levelname)s %(message)s" \ --verbose \ - --log-cli-level=DEBUG \ -m "not heavy_load" \ $(PYTEST_ADDITIONAL_PARAMETERS) \ $(TEST_TARGET) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 96bec0e2305..5e9a90d8af0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -1060,7 +1060,7 @@ async def _remove_service_and_its_data_folders( ) # remove the node's data if any - await storage_service.delete_data_folders_of_project_node( # <-- MD: this one + await storage_service.delete_data_folders_of_project_node( app, f"{project_uuid}", node_uuid, user_id ) diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index 13b967d78e3..5705c4b95ca 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -417,12 +417,7 @@ async def _mock_result() -> None: return_value=TypeAdapter(ByteSize).validate_python("1Gib"), ) - return MockedStorageSubsystem( - copy_data_folders_from_project=mock, - delete_project=mock1, - delete_node=mock2, - get_project_total_size_simcore_s3=mock3, - ) + return 
MockedStorageSubsystem(mock, mock1, mock2, mock3) @pytest.fixture From 4d20c09e51c69482b653afeef60325093f03a852 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 24 Jul 2025 10:24:24 +0200 Subject: [PATCH 29/35] removed uv locks and pyproject --- packages/pytest-simcore/pyproject.toml | 9 - packages/pytest-simcore/uv.lock | 728 ------------------------- 2 files changed, 737 deletions(-) delete mode 100644 packages/pytest-simcore/pyproject.toml delete mode 100644 packages/pytest-simcore/uv.lock diff --git a/packages/pytest-simcore/pyproject.toml b/packages/pytest-simcore/pyproject.toml deleted file mode 100644 index 4bffe2cb6a8..00000000000 --- a/packages/pytest-simcore/pyproject.toml +++ /dev/null @@ -1,9 +0,0 @@ -[project] -name = "pytest-simcore" -version = "0.1.0" -requires-python = ">=3.11" -dependencies = [ - "fastapi[standard]>=0.115.12", - "python-socketio>=5.12.1", - "uvicorn>=0.34.0", -] diff --git a/packages/pytest-simcore/uv.lock b/packages/pytest-simcore/uv.lock deleted file mode 100644 index 57c794b678f..00000000000 --- a/packages/pytest-simcore/uv.lock +++ /dev/null @@ -1,728 +0,0 @@ -version = 1 -revision = 1 -requires-python = ">=3.11" - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, -] - -[[package]] -name = "anyio" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "idna" }, - { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, -] - -[[package]] -name = "bidict" -version = "0.23.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764 }, -] - -[[package]] -name = "certifi" -version = "2025.1.31" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = 
"sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 }, -] - -[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, -] - -[[package]] -name = "dnspython" -version = "2.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 }, -] - -[[package]] -name = "email-validator" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "dnspython" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521 }, -] - -[[package]] -name = "fastapi" -version = "0.115.12" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pydantic" }, - { name = "starlette" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164 }, -] - -[package.optional-dependencies] -standard = [ - { name = "email-validator" }, - { name = "fastapi-cli", extra = ["standard"] }, - { name = "httpx" }, - { name = "jinja2" }, - { name = "python-multipart" }, - { name = "uvicorn", extra = ["standard"] }, -] - -[[package]] -name = "fastapi-cli" -version = "0.0.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "rich-toolkit" }, - { name = "typer" }, - { name = "uvicorn", extra = ["standard"] }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fe/73/82a5831fbbf8ed75905bacf5b2d9d3dfd6f04d6968b29fe6f72a5ae9ceb1/fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e", size = 16753 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/e6/5daefc851b514ce2287d8f5d358ae4341089185f78f3217a69d0ce3a390c/fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4", size = 10705 }, -] - -[package.optional-dependencies] -standard = [ - { name = "uvicorn", extra = ["standard"] }, -] - -[[package]] -name = "h11" -version = "0.14.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, -] - -[[package]] -name = "httpcore" -version = "1.0.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, -] - -[[package]] -name = "httptools" -version = "0.6.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029 }, - { url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492 }, - { url = 
"https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891 }, - { url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788 }, - { url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214 }, - { url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120 }, - { url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565 }, - { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683 }, - { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337 }, - { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796 }, - { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837 }, - { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289 }, - { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779 }, - { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634 }, - { url = 
"https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214 }, - { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431 }, - { url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121 }, - { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805 }, - { url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858 }, - { url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042 }, - { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682 }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, -] - -[[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, -] - -[[package]] -name = "jinja2" -version = "3.1.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mdurl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, -] - -[[package]] -name = "markupsafe" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, - { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, - { url = 
"https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, - { url = 
"https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, -] - -[[package]] -name = "pydantic" -version = "2.11.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, - { name = "typing-inspection" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = 
"sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591 }, -] - -[[package]] -name = "pydantic-core" -version = "2.33.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/7f/c6298830cb780c46b4f46bb24298d01019ffa4d21769f39b908cd14bbd50/pydantic_core-2.33.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24", size = 2044224 }, - { url = "https://files.pythonhosted.org/packages/a8/65/6ab3a536776cad5343f625245bd38165d6663256ad43f3a200e5936afd6c/pydantic_core-2.33.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30", size = 1858845 }, - { url = "https://files.pythonhosted.org/packages/e9/15/9a22fd26ba5ee8c669d4b8c9c244238e940cd5d818649603ca81d1c69861/pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595", size = 1910029 }, - { url = "https://files.pythonhosted.org/packages/d5/33/8cb1a62818974045086f55f604044bf35b9342900318f9a2a029a1bec460/pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e", size = 1997784 }, - { url = "https://files.pythonhosted.org/packages/c0/ca/49958e4df7715c71773e1ea5be1c74544923d10319173264e6db122543f9/pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a", size = 2141075 }, - { url = "https://files.pythonhosted.org/packages/7b/a6/0b3a167a9773c79ba834b959b4e18c3ae9216b8319bd8422792abc8a41b1/pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505", size = 2745849 }, - { url = "https://files.pythonhosted.org/packages/0b/60/516484135173aa9e5861d7a0663dce82e4746d2e7f803627d8c25dfa5578/pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f", size = 2005794 }, - { url = "https://files.pythonhosted.org/packages/86/70/05b1eb77459ad47de00cf78ee003016da0cedf8b9170260488d7c21e9181/pydantic_core-2.33.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77", size = 2123237 }, - { url = "https://files.pythonhosted.org/packages/c7/57/12667a1409c04ae7dc95d3b43158948eb0368e9c790be8b095cb60611459/pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961", size = 2086351 }, - { url = "https://files.pythonhosted.org/packages/57/61/cc6d1d1c1664b58fdd6ecc64c84366c34ec9b606aeb66cafab6f4088974c/pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1", size = 2258914 }, - { url = 
"https://files.pythonhosted.org/packages/d1/0a/edb137176a1f5419b2ddee8bde6a0a548cfa3c74f657f63e56232df8de88/pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c", size = 2257385 }, - { url = "https://files.pythonhosted.org/packages/26/3c/48ca982d50e4b0e1d9954919c887bdc1c2b462801bf408613ccc641b3daa/pydantic_core-2.33.1-cp311-cp311-win32.whl", hash = "sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896", size = 1923765 }, - { url = "https://files.pythonhosted.org/packages/33/cd/7ab70b99e5e21559f5de38a0928ea84e6f23fdef2b0d16a6feaf942b003c/pydantic_core-2.33.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83", size = 1950688 }, - { url = "https://files.pythonhosted.org/packages/4b/ae/db1fc237b82e2cacd379f63e3335748ab88b5adde98bf7544a1b1bd10a84/pydantic_core-2.33.1-cp311-cp311-win_arm64.whl", hash = "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89", size = 1908185 }, - { url = "https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 2026640 }, - { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649 }, - { url = "https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472 }, - { url = "https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509 }, - { url = "https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702 }, - { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428 }, - { url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753 }, - { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849 }, - { url = 
"https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541 }, - { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225 }, - { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", size = 2248373 }, - { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034 }, - { url = "https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848 }, - { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986 }, - { url = "https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551 }, - { url = "https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785 }, - { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758 }, - { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109 }, - { url = "https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159 }, - { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222 }, - { url = 
"https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980 }, - { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840 }, - { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518 }, - { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025 }, - { url = "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", size = 2254991 }, - { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262 }, - { url = "https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626 }, - { url = "https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590 }, - { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963 }, - { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896 }, - { url = "https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810 }, - { url = "https://files.pythonhosted.org/packages/0b/76/1794e440c1801ed35415238d2c728f26cd12695df9057154ad768b7b991c/pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a", size = 2042858 }, - { url = "https://files.pythonhosted.org/packages/73/b4/9cd7b081fb0b1b4f8150507cd59d27b275c3e22ad60b35cb19ea0977d9b9/pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc", size = 1873745 }, - { url = "https://files.pythonhosted.org/packages/e1/d7/9ddb7575d4321e40d0363903c2576c8c0c3280ebea137777e5ab58d723e3/pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b", size = 1904188 }, - { url = "https://files.pythonhosted.org/packages/d1/a8/3194ccfe461bb08da19377ebec8cb4f13c9bd82e13baebc53c5c7c39a029/pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe", size = 2083479 }, - { url = "https://files.pythonhosted.org/packages/42/c7/84cb569555d7179ca0b3f838cef08f66f7089b54432f5b8599aac6e9533e/pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5", size = 2118415 }, - { url = "https://files.pythonhosted.org/packages/3b/67/72abb8c73e0837716afbb58a59cc9e3ae43d1aa8677f3b4bc72c16142716/pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761", size = 2079623 }, - { url = "https://files.pythonhosted.org/packages/0b/cd/c59707e35a47ba4cbbf153c3f7c56420c58653b5801b055dc52cccc8e2dc/pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850", size = 2250175 }, - { url = "https://files.pythonhosted.org/packages/84/32/e4325a6676b0bed32d5b084566ec86ed7fd1e9bcbfc49c578b1755bde920/pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544", size = 2254674 }, - { url = "https://files.pythonhosted.org/packages/12/6f/5596dc418f2e292ffc661d21931ab34591952e2843e7168ea5a52591f6ff/pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5", size = 2080951 }, -] - -[[package]] -name = "pygments" -version = "2.19.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 }, -] - -[[package]] -name = "pytest-simcore" -version = "0.1.0" -source = { virtual = "." 
} -dependencies = [ - { name = "fastapi", extra = ["standard"] }, - { name = "python-socketio" }, - { name = "uvicorn" }, -] - -[package.metadata] -requires-dist = [ - { name = "fastapi", extras = ["standard"], specifier = ">=0.115.12" }, - { name = "python-socketio", specifier = ">=5.12.1" }, - { name = "uvicorn", specifier = ">=0.34.0" }, -] - -[[package]] -name = "python-dotenv" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 }, -] - -[[package]] -name = "python-engineio" -version = "4.11.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "simple-websocket" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/52/e0/a9e0fe427ce7f1b7dbf9531fa00ffe4b557c4a7bc8e71891c115af123170/python_engineio-4.11.2.tar.gz", hash = "sha256:145bb0daceb904b4bb2d3eb2d93f7dbb7bb87a6a0c4f20a94cc8654dec977129", size = 91381 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/8f/978a0b913e3f8ad33a9a2fe204d32efe3d1ee34ecb1f2829c1cfbdd92082/python_engineio-4.11.2-py3-none-any.whl", hash = "sha256:f0971ac4c65accc489154fe12efd88f53ca8caf04754c46a66e85f5102ef22ad", size = 59239 }, -] - -[[package]] -name = "python-multipart" -version = "0.0.20" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 }, -] - -[[package]] -name = "python-socketio" -version = "5.12.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "bidict" }, - { name = "python-engineio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ce/d0/40ed38076e8aee94785d546d3e3a1cae393da5806a8530be877187e2875f/python_socketio-5.12.1.tar.gz", hash = "sha256:0299ff1f470b676c09c1bfab1dead25405077d227b2c13cf217a34dadc68ba9c", size = 119991 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/a3/c69806f30dd81df5a99d592e7db4c930c3a9b098555aa97b0eb866b20b11/python_socketio-5.12.1-py3-none-any.whl", hash = "sha256:24a0ea7cfff0e021eb28c68edbf7914ee4111bdf030b95e4d250c4dc9af7a386", size = 76947 }, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, - { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, - { url = 
"https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, - { url = 
"https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, -] - -[[package]] -name = "rich" -version = "14.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markdown-it-py" }, - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229 }, -] - -[[package]] -name = "rich-toolkit" -version = "0.14.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "rich" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2e/ea/13945d58d556a28dfb0f774ad5c8af759527390e59505a40d164bf8ce1ce/rich_toolkit-0.14.1.tar.gz", hash = "sha256:9248e2d087bfc01f3e4c5c8987e05f7fa744d00dd22fa2be3aa6e50255790b3f", size = 104416 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/66/e8/61c5b12d1567fdba41a6775db12a090d88b8305424ee7c47259c70d33cb4/rich_toolkit-0.14.1-py3-none-any.whl", hash = "sha256:dc92c0117d752446d04fdc828dbca5873bcded213a091a5d3742a2beec2e6559", size = 24177 }, -] - -[[package]] -name = "shellingham" -version = "1.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, -] - -[[package]] -name = "simple-websocket" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wsproto" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b0/d4/bfa032f961103eba93de583b161f0e6a5b63cebb8f2c7d0c6e6efe1e3d2e/simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4", size = 17300 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/52/59/0782e51887ac6b07ffd1570e0364cf901ebc36345fea669969d2084baebb/simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c", size = 13842 }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = 
"sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, -] - -[[package]] -name = "starlette" -version = "0.46.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", size = 2580102 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995 }, -] - -[[package]] -name = "typer" -version = "0.15.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "rich" }, - { name = "shellingham" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061 }, -] - -[[package]] -name = "typing-extensions" -version = "4.13.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 }, -] - -[[package]] -name = "typing-inspection" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125 }, -] - -[[package]] -name = "uvicorn" -version = "0.34.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 }, -] - -[package.optional-dependencies] 
-standard = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "httptools" }, - { name = "python-dotenv" }, - { name = "pyyaml" }, - { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, - { name = "watchfiles" }, - { name = "websockets" }, -] - -[[package]] -name = "uvloop" -version = "0.21.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410 }, - { url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 805476 }, - { url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855 }, - { url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185 }, - { url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256 }, - { url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323 }, - { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284 }, - { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349 }, - { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089 }, - { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770 }, - { url = 
"https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321 }, - { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022 }, - { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123 }, - { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325 }, - { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806 }, - { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068 }, - { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428 }, - { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018 }, -] - -[[package]] -name = "watchfiles" -version = "1.0.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/03/e2/8ed598c42057de7aa5d97c472254af4906ff0a59a66699d426fc9ef795d7/watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9", size = 94537 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/39/f4/41b591f59021786ef517e1cdc3b510383551846703e03f204827854a96f8/watchfiles-1.0.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:237f9be419e977a0f8f6b2e7b0475ababe78ff1ab06822df95d914a945eac827", size = 405336 }, - { url = "https://files.pythonhosted.org/packages/ae/06/93789c135be4d6d0e4f63e96eea56dc54050b243eacc28439a26482b5235/watchfiles-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0da39ff917af8b27a4bdc5a97ac577552a38aac0d260a859c1517ea3dc1a7c4", size = 395977 }, - { url = "https://files.pythonhosted.org/packages/d2/db/1cd89bd83728ca37054512d4d35ab69b5f12b8aa2ac9be3b0276b3bf06cc/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cfcb3952350e95603f232a7a15f6c5f86c5375e46f0bd4ae70d43e3e063c13d", size = 455232 }, - { url = 
"https://files.pythonhosted.org/packages/40/90/d8a4d44ffe960517e487c9c04f77b06b8abf05eb680bed71c82b5f2cad62/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68b2dddba7a4e6151384e252a5632efcaa9bc5d1c4b567f3cb621306b2ca9f63", size = 459151 }, - { url = "https://files.pythonhosted.org/packages/6c/da/267a1546f26465dead1719caaba3ce660657f83c9d9c052ba98fb8856e13/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95cf944fcfc394c5f9de794ce581914900f82ff1f855326f25ebcf24d5397418", size = 489054 }, - { url = "https://files.pythonhosted.org/packages/b1/31/33850dfd5c6efb6f27d2465cc4c6b27c5a6f5ed53c6fa63b7263cf5f60f6/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf6cd9f83d7c023b1aba15d13f705ca7b7d38675c121f3cc4a6e25bd0857ee9", size = 523955 }, - { url = "https://files.pythonhosted.org/packages/09/84/b7d7b67856efb183a421f1416b44ca975cb2ea6c4544827955dfb01f7dc2/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:852de68acd6212cd6d33edf21e6f9e56e5d98c6add46f48244bd479d97c967c6", size = 502234 }, - { url = "https://files.pythonhosted.org/packages/71/87/6dc5ec6882a2254cfdd8b0718b684504e737273903b65d7338efaba08b52/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5730f3aa35e646103b53389d5bc77edfbf578ab6dab2e005142b5b80a35ef25", size = 454750 }, - { url = "https://files.pythonhosted.org/packages/3d/6c/3786c50213451a0ad15170d091570d4a6554976cf0df19878002fc96075a/watchfiles-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:18b3bd29954bc4abeeb4e9d9cf0b30227f0f206c86657674f544cb032296acd5", size = 631591 }, - { url = "https://files.pythonhosted.org/packages/1b/b3/1427425ade4e359a0deacce01a47a26024b2ccdb53098f9d64d497f6684c/watchfiles-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ba5552a1b07c8edbf197055bc9d518b8f0d98a1c6a73a293bc0726dce068ed01", size = 625370 }, - { url = "https://files.pythonhosted.org/packages/15/ba/f60e053b0b5b8145d682672024aa91370a29c5c921a88977eb565de34086/watchfiles-1.0.5-cp311-cp311-win32.whl", hash = "sha256:2f1fefb2e90e89959447bc0420fddd1e76f625784340d64a2f7d5983ef9ad246", size = 277791 }, - { url = "https://files.pythonhosted.org/packages/50/ed/7603c4e164225c12c0d4e8700b64bb00e01a6c4eeea372292a3856be33a4/watchfiles-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:b6e76ceb1dd18c8e29c73f47d41866972e891fc4cc7ba014f487def72c1cf096", size = 291622 }, - { url = "https://files.pythonhosted.org/packages/a2/c2/99bb7c96b4450e36877fde33690ded286ff555b5a5c1d925855d556968a1/watchfiles-1.0.5-cp311-cp311-win_arm64.whl", hash = "sha256:266710eb6fddc1f5e51843c70e3bebfb0f5e77cf4f27129278c70554104d19ed", size = 283699 }, - { url = "https://files.pythonhosted.org/packages/2a/8c/4f0b9bdb75a1bfbd9c78fad7d8854369283f74fe7cf03eb16be77054536d/watchfiles-1.0.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5eb568c2aa6018e26da9e6c86f3ec3fd958cee7f0311b35c2630fa4217d17f2", size = 401511 }, - { url = "https://files.pythonhosted.org/packages/dc/4e/7e15825def77f8bd359b6d3f379f0c9dac4eb09dd4ddd58fd7d14127179c/watchfiles-1.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a04059f4923ce4e856b4b4e5e783a70f49d9663d22a4c3b3298165996d1377f", size = 392715 }, - { url = "https://files.pythonhosted.org/packages/58/65/b72fb817518728e08de5840d5d38571466c1b4a3f724d190cec909ee6f3f/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3e380c89983ce6e6fe2dd1e1921b9952fb4e6da882931abd1824c092ed495dec", size = 454138 }, - { url = "https://files.pythonhosted.org/packages/3e/a4/86833fd2ea2e50ae28989f5950b5c3f91022d67092bfec08f8300d8b347b/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fe43139b2c0fdc4a14d4f8d5b5d967f7a2777fd3d38ecf5b1ec669b0d7e43c21", size = 458592 }, - { url = "https://files.pythonhosted.org/packages/38/7e/42cb8df8be9a37e50dd3a818816501cf7a20d635d76d6bd65aae3dbbff68/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee0822ce1b8a14fe5a066f93edd20aada932acfe348bede8aa2149f1a4489512", size = 487532 }, - { url = "https://files.pythonhosted.org/packages/fc/fd/13d26721c85d7f3df6169d8b495fcac8ab0dc8f0945ebea8845de4681dab/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0dbcb1c2d8f2ab6e0a81c6699b236932bd264d4cef1ac475858d16c403de74d", size = 522865 }, - { url = "https://files.pythonhosted.org/packages/a1/0d/7f9ae243c04e96c5455d111e21b09087d0eeaf9a1369e13a01c7d3d82478/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2014a2b18ad3ca53b1f6c23f8cd94a18ce930c1837bd891262c182640eb40a6", size = 499887 }, - { url = "https://files.pythonhosted.org/packages/8e/0f/a257766998e26aca4b3acf2ae97dff04b57071e991a510857d3799247c67/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6ae86d5cb647bf58f9f655fcf577f713915a5d69057a0371bc257e2553234", size = 454498 }, - { url = "https://files.pythonhosted.org/packages/81/79/8bf142575a03e0af9c3d5f8bcae911ee6683ae93a625d349d4ecf4c8f7df/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1a7bac2bde1d661fb31f4d4e8e539e178774b76db3c2c17c4bb3e960a5de07a2", size = 630663 }, - { url = "https://files.pythonhosted.org/packages/f1/80/abe2e79f610e45c63a70d271caea90c49bbf93eb00fa947fa9b803a1d51f/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ab626da2fc1ac277bbf752446470b367f84b50295264d2d313e28dc4405d663", size = 625410 }, - { url = "https://files.pythonhosted.org/packages/91/6f/bc7fbecb84a41a9069c2c6eb6319f7f7df113adf113e358c57fc1aff7ff5/watchfiles-1.0.5-cp312-cp312-win32.whl", hash = "sha256:9f4571a783914feda92018ef3901dab8caf5b029325b5fe4558c074582815249", size = 277965 }, - { url = "https://files.pythonhosted.org/packages/99/a5/bf1c297ea6649ec59e935ab311f63d8af5faa8f0b86993e3282b984263e3/watchfiles-1.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:360a398c3a19672cf93527f7e8d8b60d8275119c5d900f2e184d32483117a705", size = 291693 }, - { url = "https://files.pythonhosted.org/packages/7f/7b/fd01087cc21db5c47e5beae507b87965db341cce8a86f9eb12bf5219d4e0/watchfiles-1.0.5-cp312-cp312-win_arm64.whl", hash = "sha256:1a2902ede862969077b97523987c38db28abbe09fb19866e711485d9fbf0d417", size = 283287 }, - { url = "https://files.pythonhosted.org/packages/c7/62/435766874b704f39b2fecd8395a29042db2b5ec4005bd34523415e9bd2e0/watchfiles-1.0.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0b289572c33a0deae62daa57e44a25b99b783e5f7aed81b314232b3d3c81a11d", size = 401531 }, - { url = "https://files.pythonhosted.org/packages/6e/a6/e52a02c05411b9cb02823e6797ef9bbba0bfaf1bb627da1634d44d8af833/watchfiles-1.0.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a056c2f692d65bf1e99c41045e3bdcaea3cb9e6b5a53dcaf60a5f3bd95fc9763", size = 392417 }, - { url = 
"https://files.pythonhosted.org/packages/3f/53/c4af6819770455932144e0109d4854437769672d7ad897e76e8e1673435d/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9dca99744991fc9850d18015c4f0438865414e50069670f5f7eee08340d8b40", size = 453423 }, - { url = "https://files.pythonhosted.org/packages/cb/d1/8e88df58bbbf819b8bc5cfbacd3c79e01b40261cad0fc84d1e1ebd778a07/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:894342d61d355446d02cd3988a7326af344143eb33a2fd5d38482a92072d9563", size = 458185 }, - { url = "https://files.pythonhosted.org/packages/ff/70/fffaa11962dd5429e47e478a18736d4e42bec42404f5ee3b92ef1b87ad60/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab44e1580924d1ffd7b3938e02716d5ad190441965138b4aa1d1f31ea0877f04", size = 486696 }, - { url = "https://files.pythonhosted.org/packages/39/db/723c0328e8b3692d53eb273797d9a08be6ffb1d16f1c0ba2bdbdc2a3852c/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6f9367b132078b2ceb8d066ff6c93a970a18c3029cea37bfd7b2d3dd2e5db8f", size = 522327 }, - { url = "https://files.pythonhosted.org/packages/cd/05/9fccc43c50c39a76b68343484b9da7b12d42d0859c37c61aec018c967a32/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2e55a9b162e06e3f862fb61e399fe9f05d908d019d87bf5b496a04ef18a970a", size = 499741 }, - { url = "https://files.pythonhosted.org/packages/23/14/499e90c37fa518976782b10a18b18db9f55ea73ca14641615056f8194bb3/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0125f91f70e0732a9f8ee01e49515c35d38ba48db507a50c5bdcad9503af5827", size = 453995 }, - { url = "https://files.pythonhosted.org/packages/61/d9/f75d6840059320df5adecd2c687fbc18960a7f97b55c300d20f207d48aef/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13bb21f8ba3248386337c9fa51c528868e6c34a707f729ab041c846d52a0c69a", size = 629693 }, - { url = "https://files.pythonhosted.org/packages/fc/17/180ca383f5061b61406477218c55d66ec118e6c0c51f02d8142895fcf0a9/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:839ebd0df4a18c5b3c1b890145b5a3f5f64063c2a0d02b13c76d78fe5de34936", size = 624677 }, - { url = "https://files.pythonhosted.org/packages/bf/15/714d6ef307f803f236d69ee9d421763707899d6298d9f3183e55e366d9af/watchfiles-1.0.5-cp313-cp313-win32.whl", hash = "sha256:4a8ec1e4e16e2d5bafc9ba82f7aaecfeec990ca7cd27e84fb6f191804ed2fcfc", size = 277804 }, - { url = "https://files.pythonhosted.org/packages/a8/b4/c57b99518fadf431f3ef47a610839e46e5f8abf9814f969859d1c65c02c7/watchfiles-1.0.5-cp313-cp313-win_amd64.whl", hash = "sha256:f436601594f15bf406518af922a89dcaab416568edb6f65c4e5bbbad1ea45c11", size = 291087 }, -] - -[[package]] -name = "websockets" -version = "15.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423 }, - { url = 
"https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082 }, - { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330 }, - { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878 }, - { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883 }, - { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252 }, - { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521 }, - { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958 }, - { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918 }, - { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388 }, - { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828 }, - { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437 }, - { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096 }, - { url = 
"https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332 }, - { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152 }, - { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096 }, - { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523 }, - { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790 }, - { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165 }, - { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160 }, - { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395 }, - { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841 }, - { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440 }, - { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098 }, - { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329 }, - { url = 
"https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111 }, - { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054 }, - { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496 }, - { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829 }, - { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217 }, - { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195 }, - { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393 }, - { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837 }, - { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743 }, -] - -[[package]] -name = "wsproto" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226 }, -] From c5e7bbec343a4b1521fcf2e7bdc846c38b32bd67 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 24 Jul 2025 10:50:43 +0200 Subject: [PATCH 30/35] increase delay --- .../tests/unit/with_dbs/02/test_projects_cancellations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
index 2e92800de64..c94efff772a 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
@@ -134,7 +134,7 @@ async def test_copying_large_project_and_aborting_correctly_removes_new_project(
     await assert_status(resp, expected.no_content)
     # wait to check that the call to storage is "done"
     async for attempt in AsyncRetrying(
-        reraise=True, stop=stop_after_delay(10), wait=wait_fixed(1)
+        reraise=True, stop=stop_after_delay(60), wait=wait_fixed(1)
     ):
         with attempt:
             slow_storage_subsystem_mock.delete_project.assert_called_once()

From e0d628291fd4e1f036b4b4d6f077f59ca6a6ba58 Mon Sep 17 00:00:00 2001
From: sanderegg <35365065+sanderegg@users.noreply.github.com>
Date: Thu, 24 Jul 2025 12:40:53 +0200
Subject: [PATCH 31/35] fix test creating/deleting nodes in high throughput conditions

---
 .../projects/_projects_repository_legacy.py | 47 +++++++++++--------
 .../projects/_projects_service.py | 24 ++++++----
 2 files changed, 42 insertions(+), 29 deletions(-)

diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py
index 74c062f6589..6e73f5e59c7 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py
@@ -5,7 +5,6 @@
 """
 
-import datetime
 import logging
 from contextlib import AsyncExitStack
 from typing import Any, Self, cast
@@ -449,7 +448,6 @@ def _create_shared_workspace_query(
     is_search_by_multi_columns: bool,
     user_groups: list[GroupID],
 ) -> sql.Select | None:
-
     if workspace_query.workspace_scope is not WorkspaceScope.PRIVATE:
         assert workspace_query.workspace_scope in (  # nosec
             WorkspaceScope.SHARED,
@@ -951,16 +949,24 @@ async def _update_project_workbench_with_lock_and_notify(
         thread-safe operations on the project document.
""" + # Get user's primary group ID for notification + async with self.engine.acquire() as conn: + user_primary_gid = await self._get_user_primary_group_gid(conn, user_id) + + # 10 concurrent calls @exclusive( get_redis_lock_manager_client_sdk(self._app), lock_key=PROJECT_DB_UPDATE_REDIS_LOCK_KEY.format(project_uuid), blocking=True, - blocking_timeout=datetime.timedelta(seconds=30), + blocking_timeout=None, # NOTE: this is a blocking call, a timeout has undefined effects ) async def _update_workbench_and_notify() -> ( - tuple[ProjectDict, dict[NodeIDStr, Any]] + tuple[ProjectDict, dict[NodeIDStr, Any], ProjectDocument, int] ): - # Update the workbench + """This function is protected because + - the project document and its version must be kept in sync + """ + # Update the workbench work since it's atomic updated_project, changed_entries = await self._update_project_workbench( partial_workbench_data, user_id=user_id, @@ -968,10 +974,7 @@ async def _update_workbench_and_notify() -> ( product_name=product_name, allow_workbench_changes=allow_workbench_changes, ) - - # Get user's primary group ID for notification - async with self.engine.acquire() as conn: - user_primary_gid = await self._get_user_primary_group_gid(conn, user_id) + # the update project with last_modified timestamp latest is the last # Get the full project with workbench for document creation project_with_workbench = ( @@ -1004,17 +1007,23 @@ async def _update_workbench_and_notify() -> ( document_version = await increment_and_return_project_document_version( redis_client=redis_client_sdk, project_uuid=project_uuid ) - await notify_project_document_updated( - app=self._app, - project_id=project_uuid, - user_primary_gid=user_primary_gid, - version=document_version, - document=project_document, - ) - return updated_project, changed_entries - - return await _update_workbench_and_notify() + return updated_project, changed_entries, project_document, document_version + + ( + updated_project, + changed_entries, + project_document, + document_version, + ) = await _update_workbench_and_notify() + await notify_project_document_updated( + app=self._app, + project_id=project_uuid, + user_primary_gid=user_primary_gid, + version=document_version, + document=project_document, + ) + return updated_project, changed_entries async def _update_project_workbench( self, diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 5e9a90d8af0..04317b70598 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -207,9 +207,12 @@ async def patch_project_and_notify_users( get_redis_lock_manager_client_sdk(app), lock_key=PROJECT_DB_UPDATE_REDIS_LOCK_KEY.format(project_uuid), blocking=True, - blocking_timeout=datetime.timedelta(seconds=30), + blocking_timeout=None, # NOTE: this is a blocking call, a timeout has undefined effects ) - async def _patch_and_notify() -> None: + async def _patch_and_create_project_document() -> tuple[ProjectDocument, int]: + """This function is protected because + - the project document and its version must be kept in sync + """ await _projects_repository.patch_project( app=app, project_uuid=project_uuid, @@ -239,15 +242,16 @@ async def _patch_and_notify() -> None: document_version = await increment_and_return_project_document_version( redis_client=redis_client_sdk, 
project_uuid=project_uuid ) - await notify_project_document_updated( - app=app, - project_id=project_uuid, - user_primary_gid=user_primary_gid, - version=document_version, - document=project_document, - ) + return project_document, document_version - await _patch_and_notify() + project_document, document_version = await _patch_and_create_project_document() + await notify_project_document_updated( + app=app, + project_id=project_uuid, + user_primary_gid=user_primary_gid, + version=document_version, + document=project_document, + ) def _is_node_dynamic(node_key: str) -> bool: From 186d976254f75723a6a8b12925941dfda4c4a621 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 24 Jul 2025 13:08:54 +0200 Subject: [PATCH 32/35] ensure task is not dereferenced --- .../projects/_crud_api_delete.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py index 83c6305f310..b0616de0d1d 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py @@ -12,6 +12,8 @@ from aiohttp import web from models_library.projects import ProjectID from models_library.users import UserID +from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY +from servicelib.utils import fire_and_forget_task from ..director_v2 import director_v2_service from ..storage.api import delete_data_folders_of_project @@ -189,8 +191,7 @@ def _log_state_when_done(fut: asyncio.Future): ) # ------ - - task = asyncio.create_task( + task = fire_and_forget_task( delete_project( app, project_uuid, @@ -198,12 +199,11 @@ def _log_state_when_done(fut: asyncio.Future): simcore_user_agent, remove_project_dynamic_services, ), - name=_DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id), + task_suffix_name=_DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id), + fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY], ) - assert task.get_name() == _DELETE_PROJECT_TASK_NAME.format( # nosec - project_uuid, user_id - ) + assert task in get_scheduled_tasks(project_uuid, user_id) # nosec task.add_done_callback(_log_state_when_done) return task @@ -214,5 +214,7 @@ def get_scheduled_tasks(project_uuid: ProjectID, user_id: UserID) -> list[asynci return [ task for task in asyncio.all_tasks() - if task.get_name() == _DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id) + if task.get_name().endswith( + _DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id) + ) ] From d9d11a9164c542ef9c52f0fc252863f8fcdd6990 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 24 Jul 2025 13:28:11 +0200 Subject: [PATCH 33/35] wait a bit for cleanup --- .../src/servicelib/long_running_tasks/task.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/service-library/src/servicelib/long_running_tasks/task.py b/packages/service-library/src/servicelib/long_running_tasks/task.py index 7c6039f7b89..b563c3db1b1 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/task.py +++ b/packages/service-library/src/servicelib/long_running_tasks/task.py @@ -12,9 +12,9 @@ from common_library.async_tools import cancel_wait_task from models_library.api_schemas_long_running_tasks.base import TaskProgress from pydantic import 
PositiveFloat -from servicelib.background_task import create_periodic_task -from servicelib.logging_utils import log_catch +from ..background_task import create_periodic_task +from ..logging_utils import log_catch from .errors import ( TaskAlreadyRunningError, TaskCancelledError, @@ -33,7 +33,7 @@ _DEFAULT_NAMESPACE: Final[str] = "lrt" _CANCEL_TASK_TIMEOUT: Final[PositiveFloat] = datetime.timedelta( - seconds=1 + seconds=10 # NOTE: 1 second is too short to cleanup a task ).total_seconds() RegisteredTaskName: TypeAlias = str @@ -196,7 +196,6 @@ def _add_task( *, fire_and_forget: bool, ) -> TrackedTask: - tracked_task = TrackedTask( task_id=task_id, task=task, From 56ce92067d20c8bf8edf8e786833f68af57b88cf Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 24 Jul 2025 14:14:28 +0200 Subject: [PATCH 34/35] private function --- .../simcore_service_webserver/projects/_projects_service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 04317b70598..13c33253241 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -478,7 +478,7 @@ async def delete_project_by_user( await task -def get_delete_project_task( +def _get_delete_project_task( project_uuid: ProjectID, user_id: UserID ) -> asyncio.Task | None: if tasks := _crud_api_delete.get_scheduled_tasks(project_uuid, user_id): @@ -509,7 +509,7 @@ async def submit_delete_project_task( await _crud_api_delete.mark_project_as_deleted(app, project_uuid, user_id) # Ensures ONE delete task per (project,user) pair - task = get_delete_project_task(project_uuid, user_id) + task = _get_delete_project_task(project_uuid, user_id) if not task: task = _crud_api_delete.schedule_task( app, From e7c645e9053925832fc326f5566517ca1f382781 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 24 Jul 2025 14:16:47 +0200 Subject: [PATCH 35/35] ruff --- .../projects/_controller/projects_states_rest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py index 7c9ff0bcb0f..5788428a756 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py @@ -20,8 +20,6 @@ from servicelib.rest_constants import RESPONSE_MODEL_POLICY from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.webserver_models import ProjectType -from simcore_service_webserver.resource_manager.user_sessions import managed_resource -from simcore_service_webserver.socketio.server import get_socket_server from ..._meta import API_VTAG as VTAG from ...application_settings import get_application_settings @@ -30,7 +28,9 @@ from ...notifications import project_logs from ...products import products_web from ...products.models import Product +from ...resource_manager.user_sessions import managed_resource from ...security.decorators import permission_required +from ...socketio.server import get_socket_server from ...users import 
users_service from ...utils_aiohttp import envelope_json_response, get_api_base_url from .. import _projects_service, projects_wallets_service
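
Note on the locking pattern used in PATCH 31: the project document and its version number are produced while the per-project Redis lock is held, and the socket.io notification is emitted only after the lock is released, so the slow broadcast never extends the critical section shared by concurrent updaters. The sketch below is a minimal, self-contained illustration of that pattern only; it uses a plain asyncio.Lock and made-up helper names rather than the servicelib `exclusive` decorator or the webserver's own functions.

import asyncio

# Minimal, self-contained sketch (not the simcore implementation): the per-project
# lock guards only what must stay consistent -- bumping the version and capturing
# the matching document -- while the potentially slow notification happens after
# the lock is released. All names are illustrative.

_locks: dict[str, asyncio.Lock] = {}
_versions: dict[str, int] = {}


async def update_project_document(project_uuid: str, new_document: dict) -> None:
    lock = _locks.setdefault(project_uuid, asyncio.Lock())

    async with lock:
        # critical section: the document and its version number stay in sync
        _versions[project_uuid] = _versions.get(project_uuid, 0) + 1
        version = _versions[project_uuid]
        document = dict(new_document)

    # outside the lock: broadcasting does not serialize concurrent writers
    await _notify(project_uuid, version, document)


async def _notify(project_uuid: str, version: int, document: dict) -> None:
    await asyncio.sleep(0.1)  # stand-in for the socket.io room broadcast
    print(f"project {project_uuid} -> version {version}: {document}")


if __name__ == "__main__":
    asyncio.run(update_project_document("some-project-uuid", {"name": "demo"}))

With the critical section reduced to the version bump and document capture, waiting callers block only briefly, which is presumably what makes the unbounded blocking_timeout=None acceptable in the patched code.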