Merged
45 commits
8ae2c70
Adds project document versioning and live update notifications
matusdrobuliak66 Jul 22, 2025
04af57e
Removes await from non-async socket.io enter_room calls
matusdrobuliak66 Jul 22, 2025
58519f3
Refactors project patching to centralize versioned updates
matusdrobuliak66 Jul 22, 2025
25f6e97
Clarifies progress field deprecation status in comment
matusdrobuliak66 Jul 22, 2025
6befcd9
Adds atomic project workbench updates with Redis locking and notifica…
matusdrobuliak66 Jul 22, 2025
3d7c505
refactor duplication
matusdrobuliak66 Jul 22, 2025
21b41c6
Merge branch 'master' into is1647/collaboration-features-1
matusdrobuliak66 Jul 22, 2025
7956f60
fix relative import
matusdrobuliak66 Jul 22, 2025
72bdab8
fix failing tests
matusdrobuliak66 Jul 22, 2025
d4beeed
minor
matusdrobuliak66 Jul 22, 2025
dbf088f
minor
matusdrobuliak66 Jul 22, 2025
dddb7ce
minor
matusdrobuliak66 Jul 22, 2025
bfbc285
minor
matusdrobuliak66 Jul 22, 2025
5004858
add tests
matusdrobuliak66 Jul 22, 2025
714404c
add tests
matusdrobuliak66 Jul 22, 2025
47fa937
add tests
matusdrobuliak66 Jul 22, 2025
50a40f4
Merge branch 'master' into is1647/collaboration-features-1
matusdrobuliak66 Jul 23, 2025
30b8d19
add back function that is being used
matusdrobuliak66 Jul 23, 2025
82d6d17
review @GitHK @pcrespov
matusdrobuliak66 Jul 23, 2025
8ba5ce7
fix failing tests
matusdrobuliak66 Jul 23, 2025
92af830
Fix test_open_project_more_than_limitation_of_max_studies_open_per_us…
matusdrobuliak66 Jul 23, 2025
cc9db9a
fix failing tests
matusdrobuliak66 Jul 23, 2025
44b4f36
Merge branch 'master' into is1647/collaboration-features-1
matusdrobuliak66 Jul 23, 2025
e9b432a
fix issues after resolving conflicts
matusdrobuliak66 Jul 23, 2025
5aba0d9
Merge branch 'master' into is1647/collaboration-features-1
matusdrobuliak66 Jul 23, 2025
4b845df
Refactors socket connection setup in project state tests
matusdrobuliak66 Jul 23, 2025
f2067d5
Merge branch 'is1647/collaboration-features-1' of github.com:matusdro…
matusdrobuliak66 Jul 23, 2025
369d6d7
generate open api specs
matusdrobuliak66 Jul 23, 2025
88c88fb
review @pcrespov
matusdrobuliak66 Jul 23, 2025
2f2af77
minor
matusdrobuliak66 Jul 23, 2025
e43e858
review @sanderegg
matusdrobuliak66 Jul 23, 2025
22df4d8
Improves logging and test debug visibility
matusdrobuliak66 Jul 23, 2025
5af7426
Revert "Improves logging and test debug visibility"
matusdrobuliak66 Jul 23, 2025
9c5d319
Merge branch 'master' into is1647/collaboration-features-1
matusdrobuliak66 Jul 23, 2025
75a0f7f
Merge branch 'master' into is1647/collaboration-features-1
sanderegg Jul 24, 2025
5d8196c
Merge remote-tracking branch 'upstream/master' into pr/matusdrobuliak…
sanderegg Jul 24, 2025
61e784c
Merge branch 'master' into is1647/collaboration-features-1
sanderegg Jul 24, 2025
4d20c09
removed uv locks and pyproject
sanderegg Jul 24, 2025
c5e7bbe
increase delay
sanderegg Jul 24, 2025
e0d6282
fix test creating/deleting nodes in high throughput conditions
sanderegg Jul 24, 2025
186d976
ensure task is not dereferenced
sanderegg Jul 24, 2025
1a2adc7
Merge remote-tracking branch 'upstream/master' into pr/matusdrobuliak…
sanderegg Jul 24, 2025
d9d11a9
wait a bit for cleanup
sanderegg Jul 24, 2025
56ce920
private function
sanderegg Jul 24, 2025
e7c645e
ruff
sanderegg Jul 24, 2025
@@ -267,6 +267,25 @@ def to_domain_model(self) -> dict[str, Any]:
return self.model_dump(exclude_unset=True, by_alias=False)


class ProjectDocument(OutputSchema):
uuid: ProjectID
workspace_id: WorkspaceID | None
name: str
description: str
thumbnail: HttpUrl | None
last_change_date: datetime
classifiers: list[ClassifierID]
dev: dict | None
quality: dict[str, Any]
workbench: NodesDict
ui: StudyUI | None
type: ProjectType
template_type: ProjectTemplateType | None

# config
model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True)


__all__: tuple[str, ...] = (
"EmptyModel",
"ProjectCopyOverride",
@@ -91,13 +91,14 @@ def _update_json_schema_extra(schema: JsonDict) -> None:

class StudyUI(OutputSchema):
# Model fully controlled by the UI and stored under `projects.ui`
icon: HttpUrl | None = None
icon: HttpUrl | None = None # <-- Deprecated

workbench: dict[NodeIDStr, WorkbenchUI] | None = None
slideshow: dict[NodeIDStr, SlideshowUI] | None = None
current_node_id: NodeID | None = None
annotations: dict[NodeIDStr, AnnotationUI] | None = None
template_type: Literal["hypertool"] | None = None
template_type: Literal["hypertool"] | None = None # <-- Deprecated
mode: Literal["workbench", "app", "guided", "standalone", "pipeline"] | None = None

_empty_is_none = field_validator("*", mode="before")(
empty_str_to_none_pre_validator
@@ -1,5 +1,6 @@
from ..basic_types import IDStr
from ..groups import GroupID
from ..projects import ProjectID
from ..users import UserID


@@ -15,3 +16,7 @@ def from_group_id(cls, group_id: GroupID) -> "SocketIORoomStr":
@classmethod
def from_user_id(cls, user_id: UserID) -> "SocketIORoomStr":
return cls(f"user:{user_id}")

@classmethod
def from_project_id(cls, project_id: ProjectID) -> "SocketIORoomStr":
return cls(f"project:{project_id}")
@@ -164,7 +164,7 @@ class Node(BaseModel):
ge=0,
le=100,
description="the node progress value (deprecated in DB, still used for API only)",
deprecated=True,
deprecated=True, # <-- I think this is not true, it is still used by the File Picker (frontend node)
),
] = None

8 changes: 8 additions & 0 deletions packages/service-library/src/servicelib/redis/__init__.py
@@ -8,6 +8,11 @@
ProjectLockError,
)
from ._models import RedisManagerDBConfig
from ._project_document_version import (
PROJECT_DB_UPDATE_REDIS_LOCK_KEY,
PROJECT_DOCUMENT_VERSION_KEY,
increment_and_return_project_document_version,
)
from ._project_lock import (
get_project_locked_state,
is_project_locked,
@@ -19,10 +24,13 @@
"CouldNotAcquireLockError",
"CouldNotConnectToRedisError",
"exclusive",
"increment_and_return_project_document_version",
"get_project_locked_state",
"handle_redis_returns_union_types",
"is_project_locked",
"LockLostError",
"PROJECT_DB_UPDATE_REDIS_LOCK_KEY",
"PROJECT_DOCUMENT_VERSION_KEY",
"ProjectLockError",
"RedisClientSDK",
"RedisClientsManager",
@@ -0,0 +1,39 @@
"""Project document versioning utilities.

This module provides utilities for managing project document versions using Redis.
The versioning system ensures that all users working on a project are synchronized
with the latest changes through atomic version incrementing.
"""

from typing import Final

from models_library.projects import ProjectID

from ._client import RedisClientSDK

# Redis key patterns
PROJECT_DOCUMENT_VERSION_KEY: Final[str] = "projects:{}:version"
PROJECT_DB_UPDATE_REDIS_LOCK_KEY: Final[str] = "project_db_update:{}"


async def increment_and_return_project_document_version(
redis_client: RedisClientSDK, project_uuid: ProjectID
) -> int:
"""
Atomically increments and returns the project document version using Redis.
Returns the incremented version number.

This function ensures thread-safe version incrementing by using Redis INCR command
which is atomic. The version starts at 1 for the first call.

Args:
redis_client: The Redis client SDK instance
project_uuid: The project UUID to get/increment version for

Returns:
The new (incremented) version number
"""
version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid)
# If key doesn't exist, it's created with value 0 and then incremented to 1
output = await redis_client.redis.incr(version_key)
return int(output)
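A hedged sketch of how a caller might combine the two exported keys: serialize a project write with a Redis lock on `PROJECT_DB_UPDATE_REDIS_LOCK_KEY`, then bump the document version so collaborators can detect stale copies. The lock handling below goes through redis-py's asyncio lock directly and the `do_patch` callback is a placeholder; this is not the webserver's actual code path.

```python
# Sketch under assumptions: redis_client is a connected RedisClientSDK and
# do_patch is any awaitable that persists the new project document.
from collections.abc import Awaitable, Callable

from models_library.projects import ProjectID
from servicelib.redis import (
    PROJECT_DB_UPDATE_REDIS_LOCK_KEY,
    RedisClientSDK,
    increment_and_return_project_document_version,
)


async def patch_and_bump_version(
    redis_client: RedisClientSDK,
    project_uuid: ProjectID,
    do_patch: Callable[[], Awaitable[None]],
) -> int:
    lock_key = PROJECT_DB_UPDATE_REDIS_LOCK_KEY.format(project_uuid)
    # redis-py asyncio lock: allow only one writer per project at a time
    async with redis_client.redis.lock(lock_key, timeout=10):
        await do_patch()  # e.g. write the patched document to the database
        # atomic INCR: every successful write gets a strictly increasing version
        return await increment_and_return_project_document_version(
            redis_client, project_uuid
        )
```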
@@ -0,0 +1,50 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument

from typing import cast
from uuid import UUID

import pytest
from faker import Faker
from models_library.projects import ProjectID
from servicelib.redis import RedisClientSDK
from servicelib.redis._project_document_version import (
increment_and_return_project_document_version,
)

pytest_simcore_core_services_selection = [
"redis",
]
pytest_simcore_ops_services_selection = [
"redis-commander",
]


@pytest.fixture()
def project_uuid(faker: Faker) -> ProjectID:
return cast(UUID, faker.uuid4(cast_to=None))


async def test_project_document_version_workflow(
redis_client_sdk: RedisClientSDK, project_uuid: ProjectID
):
"""Test the complete workflow of getting and incrementing project document versions."""

# First increment should return 1
new_version = await increment_and_return_project_document_version(
redis_client_sdk, project_uuid
)
assert new_version == 1

# Second increment should return 2
new_version = await increment_and_return_project_document_version(
redis_client_sdk, project_uuid
)
assert new_version == 2

# Multiple increments should work correctly
for expected_version in range(3, 6):
new_version = await increment_and_return_project_document_version(
redis_client_sdk, project_uuid
)
assert new_version == expected_version
1 change: 1 addition & 0 deletions packages/settings-library/src/settings_library/redis.py
@@ -19,6 +19,7 @@ class RedisDatabase(IntEnum):
DEFERRED_TASKS = 7
DYNAMIC_SERVICES = 8
CELERY_TASKS = 9
DOCUMENTS = 10


class RedisSettings(BaseCustomSettings):
5 changes: 3 additions & 2 deletions services/docker-compose-ops.yml
@@ -94,8 +94,9 @@ services:
announcements:${REDIS_HOST}:${REDIS_PORT}:5:${REDIS_PASSWORD},
distributed_identifiers:${REDIS_HOST}:${REDIS_PORT}:6:${REDIS_PASSWORD},
deferred_tasks:${REDIS_HOST}:${REDIS_PORT}:7:${REDIS_PASSWORD},
dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD}
celery_tasks:${REDIS_HOST}:${REDIS_PORT}:9:${REDIS_PASSWORD}
dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD},
celery_tasks:${REDIS_HOST}:${REDIS_PORT}:9:${REDIS_PASSWORD},
documents:${REDIS_HOST}:${REDIS_PORT}:10:${REDIS_PASSWORD}
# If you add/remove a db, do not forget to update the --databases entry in the docker-compose.yml
ports:
- "18081:8081"
2 changes: 1 addition & 1 deletion services/docker-compose.yml
@@ -1442,7 +1442,7 @@ services:
# also aof (append only) is also enabled such that we get full durability at the expense
# of backup size. The backup is written into /data.
# https://redis.io/topics/persistence
[ "redis-server", "--save", "60 1", "--loglevel", "verbose", "--databases", "10", "--appendonly", "yes", "--requirepass", "${REDIS_PASSWORD}" ]
[ "redis-server", "--save", "60 1", "--loglevel", "verbose", "--databases", "11", "--appendonly", "yes", "--requirepass", "${REDIS_PASSWORD}" ]
networks:
- default
- autoscaling_subnet
@@ -10,8 +10,8 @@
from ..db.plugin import get_asyncpg_engine
from ..projects import _folders_repository as projects_folders_repository
from ..projects import _groups_repository as projects_groups_repository
from ..projects import _projects_repository as _projects_repository
from ..projects._access_rights_service import check_user_project_permission
from ..projects.api import patch_project_and_notify_users
from ..users import users_service
from ..workspaces.api import check_user_workspace_access
from . import _folders_repository
@@ -75,14 +75,15 @@ async def move_folder_into_workspace(

# ⬆️ Here we already have guarantees that the user has all the right permissions to do this operation ⬆️

user: dict = await users_service.get_user(app, user_id)
async with transaction_context(get_asyncpg_engine(app)) as conn:
# 4. Update workspace ID on the project resource
for project_id in project_ids:
await _projects_repository.patch_project(
await patch_project_and_notify_users(
app=app,
connection=conn,
project_uuid=project_id,
new_partial_project_data={"workspace_id": workspace_id},
patch_project_data={"workspace_id": workspace_id},
user_primary_gid=user["primary_gid"],
)

# 5. BATCH update of folders with workspace_id
@@ -122,7 +123,6 @@ async def move_folder_into_workspace(
)

# 9. Remove all project permissions, leave only the user who moved the project
user = await users_service.get_user(app, user_id=user_id)
for project_id in project_ids:
await projects_groups_repository.delete_all_project_groups(
app, connection=conn, project_id=project_id
@@ -314,7 +314,7 @@ async def patch_project(request: web.Request):
path_params = parse_request_path_parameters_as(ProjectPathParams, request)
project_patch = await parse_request_body_as(ProjectPatch, request)

await _projects_service.patch_project(
await _projects_service.patch_project_for_user(
request.app,
user_id=req_ctx.user_id,
project_uuid=path_params.project_id,
@@ -4,6 +4,7 @@

from aiohttp import web
from models_library.api_schemas_webserver.projects import ProjectGet
from models_library.api_schemas_webserver.socketio import SocketIORoomStr
from models_library.projects_state import ProjectState
from pydantic import BaseModel
from servicelib.aiohttp import status
@@ -18,6 +19,8 @@
)
from simcore_postgres_database.models.users import UserRole
from simcore_postgres_database.webserver_models import ProjectType
from simcore_service_webserver.resource_manager.user_sessions import managed_resource
from simcore_service_webserver.socketio.server import get_socket_server

from ..._meta import API_VTAG as VTAG
from ...director_v2.exceptions import DirectorV2ServiceError
@@ -103,6 +106,20 @@ async def open_project(request: web.Request) -> web.Response:
):
raise HTTPLockedError(text="Project is locked, try later")

# Connect the socket_id to a project room
with managed_resource(
req_ctx.user_id, client_session_id, request.app
) as resource_registry:
_socket_id = await resource_registry.get_socket_id()
if _socket_id is None:
raise web.HTTPBadRequest(
text="Cannot open project without a socket_id, please refresh the page"
)
sio = get_socket_server(request.app)
sio.enter_room(
_socket_id, SocketIORoomStr.from_project_id(path_params.project_id)
)

# the project can be opened, let's update its product links
await _projects_service.update_project_linked_product(
request.app, path_params.project_id, req_ctx.product_name
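Hypothetical follow-up to the room join above: once every collaborator's socket is in the project room, a document change can be fanned out with a single emit. The event name and payload shape below are assumptions for illustration only; the actual notification helper is defined elsewhere in this PR.

```python
# Illustration only; event name "projectDocument:updated" and payload shape are assumed.
from aiohttp import web
from models_library.api_schemas_webserver.socketio import SocketIORoomStr
from models_library.projects import ProjectID
from simcore_service_webserver.socketio.server import get_socket_server


async def notify_project_room(
    app: web.Application, project_id: ProjectID, document: dict, version: int
) -> None:
    sio = get_socket_server(app)
    await sio.emit(
        "projectDocument:updated",
        {"projectId": f"{project_id}", "version": version, "document": document},
        room=SocketIORoomStr.from_project_id(project_id),
    )
```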
@@ -41,7 +41,7 @@
from ..users import users_service
from ..workspaces.api import check_user_workspace_access, get_user_workspace
from ..workspaces.errors import WorkspaceAccessForbiddenError
from . import _folders_repository, _projects_service
from . import _folders_repository, _projects_repository, _projects_service
from ._metadata_service import set_project_ancestors
from ._permalink_service import update_or_pop_permalink_in_project
from ._projects_repository_legacy import ProjectDBAPI
Expand Down Expand Up @@ -162,10 +162,10 @@ async def _copy_files_from_source_project(
product_name: str,
task_progress: TaskProgress,
):
_projects_repository = ProjectDBAPI.get_from_app_context(app)
_projects_repository_legacy = ProjectDBAPI.get_from_app_context(app)

needs_lock_source_project: bool = (
await _projects_repository.get_project_type(
await _projects_repository_legacy.get_project_type(
TypeAdapter(ProjectID).validate_python(source_project["uuid"])
)
!= ProjectTypeDB.TEMPLATE
Expand Down Expand Up @@ -294,7 +294,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche
f"{from_study=}",
)

_projects_repository = ProjectDBAPI.get_from_app_context(request.app)
_projects_repository_legacy = ProjectDBAPI.get_from_app_context(request.app)

new_project: ProjectDict = {}
copy_file_coro = None
Expand Down Expand Up @@ -372,7 +372,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche
)

# 3.1 save new project in DB
new_project = await _projects_repository.insert_project(
new_project = await _projects_repository_legacy.insert_project(
project=jsonable_encoder(new_project),
user_id=user_id,
product_name=product_name,
@@ -408,8 +408,10 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche

# 5. unhide the project if needed since it is now complete
if not new_project_was_hidden_before_data_was_copied:
await _projects_repository.set_hidden_flag(
new_project["uuid"], hidden=False
await _projects_repository.patch_project(
request.app,
project_uuid=new_project["uuid"],
new_partial_project_data={"hidden": False},
)

# update the network information in director-v2
@@ -427,7 +429,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche
product_api_base_url,
)
# get the latest state of the project (lastChangeDate for instance)
new_project, _ = await _projects_repository.get_project_dict_and_type(
new_project, _ = await _projects_repository_legacy.get_project_dict_and_type(
project_uuid=new_project["uuid"]
)
# Appends state
@@ -444,7 +446,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche

# Adds folderId
user_specific_project_data_db = (
await _projects_repository.get_user_specific_project_data_db(
await _projects_repository_legacy.get_user_specific_project_data_db(
project_uuid=new_project["uuid"],
private_workspace_user_id_or_none=(
user_id if workspace_id is None else None
@@ -467,7 +469,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche
for gid, access in workspace.access_rights.items()
}

_project_product_name = await _projects_repository.get_project_product(
_project_product_name = await _projects_repository_legacy.get_project_product(
project_uuid=new_project["uuid"]
)
assert (
@@ -17,6 +17,7 @@
from ..storage.api import delete_data_folders_of_project
from ..users.exceptions import UserNotFoundError
from ..users.users_service import FullNameDict
from . import _projects_repository
from ._access_rights_service import check_user_project_permission
from ._projects_repository_legacy import ProjectDBAPI
from .exceptions import (
@@ -70,7 +71,11 @@ async def mark_project_as_deleted(
# NOTE: if any of the steps below fail, it might result in
# services/projects/data that are inconsistent. The GC should
# be able to detect that and resolve it.
await db.set_hidden_flag(project_uuid, hidden=True)
await _projects_repository.patch_project(
app,
project_uuid=project_uuid,
new_partial_project_data={"hidden": True},
)


async def delete_project(