Merged

45 commits
8ae2c70
Adds project document versioning and live update notifications
matusdrobuliak66 Jul 22, 2025
04af57e
Removes await from non-async socket.io enter_room calls
matusdrobuliak66 Jul 22, 2025
58519f3
Refactors project patching to centralize versioned updates
matusdrobuliak66 Jul 22, 2025
25f6e97
Clarifies progress field deprecation status in comment
matusdrobuliak66 Jul 22, 2025
6befcd9
Adds atomic project workbench updates with Redis locking and notifica…
matusdrobuliak66 Jul 22, 2025
3d7c505
refactor duplication
matusdrobuliak66 Jul 22, 2025
21b41c6
Merge branch 'master' into is1647/collaboration-features-1
matusdrobuliak66 Jul 22, 2025
7956f60
fix relative import
matusdrobuliak66 Jul 22, 2025
72bdab8
fix failing tests
matusdrobuliak66 Jul 22, 2025
d4beeed
minor
matusdrobuliak66 Jul 22, 2025
dbf088f
minor
matusdrobuliak66 Jul 22, 2025
dddb7ce
minor
matusdrobuliak66 Jul 22, 2025
bfbc285
minor
matusdrobuliak66 Jul 22, 2025
5004858
add tests
matusdrobuliak66 Jul 22, 2025
714404c
add tests
matusdrobuliak66 Jul 22, 2025
47fa937
add tests
matusdrobuliak66 Jul 22, 2025
50a40f4
Merge branch 'master' into is1647/collaboration-features-1
matusdrobuliak66 Jul 23, 2025
30b8d19
add back function that is being used
matusdrobuliak66 Jul 23, 2025
82d6d17
review @GitHK @pcrespov
matusdrobuliak66 Jul 23, 2025
8ba5ce7
fix failing tests
matusdrobuliak66 Jul 23, 2025
92af830
Fix test_open_project_more_than_limitation_of_max_studies_open_per_us…
matusdrobuliak66 Jul 23, 2025
cc9db9a
fix failing tests
matusdrobuliak66 Jul 23, 2025
44b4f36
Merge branch 'master' into is1647/collaboration-features-1
matusdrobuliak66 Jul 23, 2025
e9b432a
fix issues after resolving conflicts
matusdrobuliak66 Jul 23, 2025
5aba0d9
Merge branch 'master' into is1647/collaboration-features-1
matusdrobuliak66 Jul 23, 2025
4b845df
Refactors socket connection setup in project state tests
matusdrobuliak66 Jul 23, 2025
f2067d5
Merge branch 'is1647/collaboration-features-1' of github.com:matusdro…
matusdrobuliak66 Jul 23, 2025
369d6d7
generate open api specs
matusdrobuliak66 Jul 23, 2025
88c88fb
review @pcrespov
matusdrobuliak66 Jul 23, 2025
2f2af77
minor
matusdrobuliak66 Jul 23, 2025
e43e858
review @sanderegg
matusdrobuliak66 Jul 23, 2025
22df4d8
Improves logging and test debug visibility
matusdrobuliak66 Jul 23, 2025
5af7426
Revert "Improves logging and test debug visibility"
matusdrobuliak66 Jul 23, 2025
9c5d319
Merge branch 'master' into is1647/collaboration-features-1
matusdrobuliak66 Jul 23, 2025
75a0f7f
Merge branch 'master' into is1647/collaboration-features-1
sanderegg Jul 24, 2025
5d8196c
Merge remote-tracking branch 'upstream/master' into pr/matusdrobuliak…
sanderegg Jul 24, 2025
61e784c
Merge branch 'master' into is1647/collaboration-features-1
sanderegg Jul 24, 2025
4d20c09
removed uv locks and pyproject
sanderegg Jul 24, 2025
c5e7bbe
increase delay
sanderegg Jul 24, 2025
e0d6282
fix test creating/deleting nodes in high throughput conditions
sanderegg Jul 24, 2025
186d976
ensure task is not dereferenced
sanderegg Jul 24, 2025
1a2adc7
Merge remote-tracking branch 'upstream/master' into pr/matusdrobuliak…
sanderegg Jul 24, 2025
d9d11a9
wait a bit for cleanup
sanderegg Jul 24, 2025
56ce920
private function
sanderegg Jul 24, 2025
e7c645e
ruff
sanderegg Jul 24, 2025
@@ -267,6 +267,25 @@ def to_domain_model(self) -> dict[str, Any]:
return self.model_dump(exclude_unset=True, by_alias=False)


class ProjectDocument(OutputSchema):
uuid: ProjectID
workspace_id: WorkspaceID | None
name: str
description: str
thumbnail: HttpUrl | None
last_change_date: datetime
classifiers: list[ClassifierID]
dev: dict | None
quality: dict[str, Any]
workbench: NodesDict
ui: StudyUI | None
type: ProjectType
template_type: ProjectTemplateType | None

# config
model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True)


__all__: tuple[str, ...] = (
"EmptyModel",
"ProjectCopyOverride",
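As a side note, here is a minimal sketch of how the new `ProjectDocument` schema could be hydrated from a database row and serialized for broadcasting; the import path and the helper function are assumptions for illustration only, not part of this diff.

```python
# Hypothetical usage sketch (import path assumed from the hunk above).
from typing import Any

from models_library.api_schemas_webserver.projects import ProjectDocument


def build_document_payload(project_row: Any, version: int) -> dict[str, Any]:
    # from_attributes=True lets the model be validated straight from an ORM/record object
    document = ProjectDocument.model_validate(project_row)
    return {
        "version": version,  # Redis-tracked document version (see helpers further down)
        "document": document.model_dump(mode="json", by_alias=True),
    }
```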
@@ -91,13 +91,14 @@ def _update_json_schema_extra(schema: JsonDict) -> None:

class StudyUI(OutputSchema):
# Model fully controlled by the UI and stored under `projects.ui`
icon: HttpUrl | None = None
icon: HttpUrl | None = None # <-- Deprecated

workbench: dict[NodeIDStr, WorkbenchUI] | None = None
slideshow: dict[NodeIDStr, SlideshowUI] | None = None
current_node_id: NodeID | None = None
annotations: dict[NodeIDStr, AnnotationUI] | None = None
template_type: Literal["hypertool"] | None = None
template_type: Literal["hypertool"] | None = None # <-- Deprecated
mode: Literal["workbench", "app", "guided", "standalone", "pipeline"] | None = None

_empty_is_none = field_validator("*", mode="before")(
empty_str_to_none_pre_validator
@@ -1,5 +1,6 @@
from ..basic_types import IDStr
from ..groups import GroupID
from ..projects import ProjectID
from ..users import UserID


@@ -15,3 +16,7 @@ def from_group_id(cls, group_id: GroupID) -> "SocketIORoomStr":
@classmethod
def from_user_id(cls, user_id: UserID) -> "SocketIORoomStr":
return cls(f"user:{user_id}")

@classmethod
def from_project_id(cls, project_id: ProjectID) -> "SocketIORoomStr":
return cls(f"project:{project_id}")
@@ -164,7 +164,7 @@ class Node(BaseModel):
ge=0,
le=100,
description="the node progress value (deprecated in DB, still used for API only)",
deprecated=True,
deprecated=True, # <-- I think this is not true, it is still used by the File Picker (frontend node)
),
] = None

10 changes: 10 additions & 0 deletions packages/service-library/src/servicelib/redis/__init__.py
@@ -8,6 +8,12 @@
ProjectLockError,
)
from ._models import RedisManagerDBConfig
from ._project_document_version import (
PROJECT_DB_UPDATE_REDIS_LOCK_KEY,
PROJECT_DOCUMENT_VERSION_KEY,
get_and_increment_project_document_version,
get_project_document_version,
)
from ._project_lock import (
get_project_locked_state,
is_project_locked,
@@ -19,10 +25,14 @@
"CouldNotAcquireLockError",
"CouldNotConnectToRedisError",
"exclusive",
"get_and_increment_project_document_version",
"get_project_document_version",
"get_project_locked_state",
"handle_redis_returns_union_types",
"is_project_locked",
"LockLostError",
"PROJECT_DB_UPDATE_REDIS_LOCK_KEY",
"PROJECT_DOCUMENT_VERSION_KEY",
"ProjectLockError",
"RedisClientSDK",
"RedisClientsManager",
@@ -0,0 +1,57 @@
"""Project document versioning utilities.
This module provides utilities for managing project document versions using Redis.
The versioning system ensures that all users working on a project are synchronized
with the latest changes through atomic version incrementing.
"""

from typing import Final

from models_library.projects import ProjectID

from ._client import RedisClientSDK

# Redis key patterns
PROJECT_DOCUMENT_VERSION_KEY: Final[str] = "projects:{}:version"
PROJECT_DB_UPDATE_REDIS_LOCK_KEY: Final[str] = "project_db_update:{}"


async def get_and_increment_project_document_version(
redis_client: RedisClientSDK, project_uuid: ProjectID
) -> int:
"""
Atomically gets and increments the project document version using Redis.
Returns the incremented version number.
This function ensures thread-safe version incrementing by using Redis INCR command
which is atomic. The version starts at 1 for the first call.
Args:
redis_client: The Redis client SDK instance
project_uuid: The project UUID to get/increment version for
Returns:
The new (incremented) version number
"""
version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid)
# If key doesn't exist, it's created with value 0 and then incremented to 1
output = await redis_client.redis.incr(version_key)
return int(output)


async def get_project_document_version(
redis_client: RedisClientSDK, project_uuid: ProjectID
) -> int:
"""
Gets the current project document version without incrementing it.
Args:
redis_client: The Redis client SDK instance
project_uuid: The project UUID to get version for
Returns:
The current version number (0 if no version exists yet)
"""
version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid)
version = await redis_client.redis.get(version_key)
return int(version) if version is not None else 0
@@ -0,0 +1,68 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument

from typing import cast
from uuid import UUID

import pytest
from faker import Faker
from models_library.projects import ProjectID
from servicelib.redis import RedisClientSDK
from servicelib.redis._project_document_version import (
get_and_increment_project_document_version,
get_project_document_version,
)

pytest_simcore_core_services_selection = [
"redis",
]
pytest_simcore_ops_services_selection = [
"redis-commander",
]


@pytest.fixture()
def project_uuid(faker: Faker) -> ProjectID:
return cast(UUID, faker.uuid4(cast_to=None))


async def test_project_document_version_workflow(
redis_client_sdk: RedisClientSDK, project_uuid: ProjectID
):
"""Test the complete workflow of getting and incrementing project document versions."""

# Initially, version should be 0 (no version exists yet)
current_version = await get_project_document_version(redis_client_sdk, project_uuid)
assert current_version == 0

# First increment should return 1
new_version = await get_and_increment_project_document_version(
redis_client_sdk, project_uuid
)
assert new_version == 1

# Getting current version should now return 1
current_version = await get_project_document_version(redis_client_sdk, project_uuid)
assert current_version == 1

# Second increment should return 2
new_version = await get_and_increment_project_document_version(
redis_client_sdk, project_uuid
)
assert new_version == 2

# Getting current version should now return 2
current_version = await get_project_document_version(redis_client_sdk, project_uuid)
assert current_version == 2

# Multiple increments should work correctly
for expected_version in range(3, 6):
new_version = await get_and_increment_project_document_version(
redis_client_sdk, project_uuid
)
assert new_version == expected_version

current_version = await get_project_document_version(
redis_client_sdk, project_uuid
)
assert current_version == expected_version
1 change: 1 addition & 0 deletions packages/settings-library/src/settings_library/redis.py
@@ -19,6 +19,7 @@ class RedisDatabase(IntEnum):
DEFERRED_TASKS = 7
DYNAMIC_SERVICES = 8
CELERY_TASKS = 9
DOCUMENTS = 10


class RedisSettings(BaseCustomSettings):
5 changes: 3 additions & 2 deletions services/docker-compose-ops.yml
@@ -94,8 +94,9 @@ services:
announcements:${REDIS_HOST}:${REDIS_PORT}:5:${REDIS_PASSWORD},
distributed_identifiers:${REDIS_HOST}:${REDIS_PORT}:6:${REDIS_PASSWORD},
deferred_tasks:${REDIS_HOST}:${REDIS_PORT}:7:${REDIS_PASSWORD},
dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD}
celery_tasks:${REDIS_HOST}:${REDIS_PORT}:9:${REDIS_PASSWORD}
dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD},
celery_tasks:${REDIS_HOST}:${REDIS_PORT}:9:${REDIS_PASSWORD},
documents:${REDIS_HOST}:${REDIS_PORT}:10:${REDIS_PASSWORD}
# If you add/remove a db, do not forget to update the --databases entry in the docker-compose.yml
ports:
- "18081:8081"
2 changes: 1 addition & 1 deletion services/docker-compose.yml
@@ -1442,7 +1442,7 @@ services:
# also aof (append only) is also enabled such that we get full durability at the expense
# of backup size. The backup is written into /data.
# https://redis.io/topics/persistence
[ "redis-server", "--save", "60 1", "--loglevel", "verbose", "--databases", "10", "--appendonly", "yes", "--requirepass", "${REDIS_PASSWORD}" ]
[ "redis-server", "--save", "60 1", "--loglevel", "verbose", "--databases", "11", "--appendonly", "yes", "--requirepass", "${REDIS_PASSWORD}" ]
networks:
- default
- autoscaling_subnet
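Both entries above wire up the new `DOCUMENTS` logical database (index 10), which is why `--databases` is raised to 11. A minimal sketch of talking to that database directly with `redis.asyncio` follows; host, port and password are placeholders.

```python
# Sketch: connect to the DOCUMENTS database (index 10) and bump a version key.
import asyncio

import redis.asyncio as aioredis
from settings_library.redis import RedisDatabase


async def main() -> None:
    client = aioredis.Redis(
        host="localhost",  # placeholder for ${REDIS_HOST}
        port=6379,  # placeholder for ${REDIS_PORT}
        password="secret",  # placeholder for ${REDIS_PASSWORD}
        db=int(RedisDatabase.DOCUMENTS),  # == 10, needs redis-server --databases >= 11
        decode_responses=True,
    )
    print(await client.incr("projects:some-project-uuid:version"))
    await client.aclose()  # redis-py >= 5


asyncio.run(main())
```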
@@ -10,8 +10,8 @@
from ..db.plugin import get_asyncpg_engine
from ..projects import _folders_repository as projects_folders_repository
from ..projects import _groups_repository as projects_groups_repository
from ..projects import _projects_repository as _projects_repository
from ..projects._access_rights_service import check_user_project_permission
from ..projects.api import patch_project_and_notify_users
from ..users import users_service
from ..workspaces.api import check_user_workspace_access
from . import _folders_repository
@@ -75,14 +75,15 @@ async def move_folder_into_workspace(

    # ⬆️ At this point it is already guaranteed that the user has all required permissions for this operation ⬆️

user: dict = await users_service.get_user(app, user_id)
async with transaction_context(get_asyncpg_engine(app)) as conn:
# 4. Update workspace ID on the project resource
for project_id in project_ids:
await _projects_repository.patch_project(
await patch_project_and_notify_users(
app=app,
connection=conn,
project_uuid=project_id,
new_partial_project_data={"workspace_id": workspace_id},
patch_project_data={"workspace_id": workspace_id},
user_primary_gid=user["primary_gid"],
)

# 5. BATCH update of folders with workspace_id
@@ -122,7 +123,6 @@ async def move_folder_into_workspace(
)

# 9. Remove all project permissions, leave only the user who moved the project
user = await users_service.get_user(app, user_id=user_id)
for project_id in project_ids:
await projects_groups_repository.delete_all_project_groups(
app, connection=conn, project_id=project_id
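`patch_project_and_notify_users` itself is not shown in this diff; the sketch below only illustrates, under assumptions, the lock–patch–increment–notify sequence suggested by the helpers exported from `servicelib.redis`. The repository write and the broadcast are left as comments, and the way `exclusive` is parameterized here is also an assumption.

```python
# Hypothetical sketch of an atomic "patch + notify" flow; the real
# patch_project_and_notify_users implementation may differ.
from models_library.projects import ProjectID
from servicelib.redis import (
    PROJECT_DB_UPDATE_REDIS_LOCK_KEY,
    RedisClientSDK,
    exclusive,
    get_and_increment_project_document_version,
)


async def _patch_and_notify_sketch(
    redis_client: RedisClientSDK,
    project_uuid: ProjectID,
    patch_project_data: dict,
) -> int:
    @exclusive(
        redis_client,
        lock_key=PROJECT_DB_UPDATE_REDIS_LOCK_KEY.format(project_uuid),
    )
    async def _update() -> int:
        # 1) persist patch_project_data to the projects table (repository call omitted)
        # 2) bump the shared document version so connected clients can resync
        return await get_and_increment_project_document_version(
            redis_client, project_uuid
        )

    new_version = await _update()
    # 3) broadcast {"version": new_version, ...} to SocketIORoomStr.from_project_id(project_uuid)
    return new_version
```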
@@ -314,7 +314,7 @@ async def patch_project(request: web.Request):
path_params = parse_request_path_parameters_as(ProjectPathParams, request)
project_patch = await parse_request_body_as(ProjectPatch, request)

await _projects_service.patch_project(
await _projects_service.patch_project_for_user(
request.app,
user_id=req_ctx.user_id,
project_uuid=path_params.project_id,
@@ -4,6 +4,7 @@

from aiohttp import web
from models_library.api_schemas_webserver.projects import ProjectGet
from models_library.api_schemas_webserver.socketio import SocketIORoomStr
from models_library.projects_state import ProjectState
from pydantic import BaseModel
from servicelib.aiohttp import status
@@ -18,6 +19,8 @@
)
from simcore_postgres_database.models.users import UserRole
from simcore_postgres_database.webserver_models import ProjectType
from simcore_service_webserver.resource_manager.user_sessions import managed_resource
from simcore_service_webserver.socketio.server import get_socket_server

from ..._meta import API_VTAG as VTAG
from ...director_v2.exceptions import DirectorV2ServiceError
@@ -103,6 +106,20 @@ async def open_project(request: web.Request) -> web.Response:
):
raise HTTPLockedError(text="Project is locked, try later")

# Connect the socket_id to a project room
with managed_resource(
req_ctx.user_id, client_session_id, request.app
) as resource_registry:
_socket_id = await resource_registry.get_socket_id()
if _socket_id is None:
raise web.HTTPBadRequest(
text="Cannot open project without a socket_id, please refresh the page"
)
sio = get_socket_server(request.app)
sio.enter_room(
_socket_id, SocketIORoomStr.from_project_id(path_params.project_id)
)

# the project can be opened, let's update its product links
await _projects_service.update_project_linked_product(
request.app, path_params.project_id, req_ctx.product_name
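Note that `enter_room` is deliberately not awaited: per the commit "Removes await from non-async socket.io enter_room calls", it is a plain synchronous method on python-socketio's `AsyncServer`. A symmetric, hypothetical counterpart (not part of this diff) would drop the socket from the room again once the user closes the project, so it stops receiving document updates:

```python
# Hypothetical counterpart, assuming leave_room is synchronous like enter_room.
from aiohttp import web
from models_library.api_schemas_webserver.socketio import SocketIORoomStr
from models_library.projects import ProjectID
from simcore_service_webserver.socketio.server import get_socket_server


def leave_project_room(
    app: web.Application, socket_id: str, project_id: ProjectID
) -> None:
    sio = get_socket_server(app)
    sio.leave_room(socket_id, SocketIORoomStr.from_project_id(project_id))
```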