From a11fef6f5a116abdd27e910f4e7d05d857825192 Mon Sep 17 00:00:00 2001
From: matusdrobuliak66
Date: Wed, 21 May 2025 13:48:53 +0200
Subject: [PATCH 1/9] migration

---
 api/specs/web-server/_computations.py | 10 +-
 .../api_schemas_webserver/computations.py | 15 +-
 .../src/models_library/projects_metadata.py | 17 ++
 ...8719855b_add_index_to_projects_metadata.py | 34 +++
 .../models/projects_metadata.py | 8 +-
 .../director_v2/computations.py | 8 +-
 .../api/rpc/_computations.py | 37 ++-
 .../modules/db/repositories/comp_runs.py | 8 +-
 .../db/repositories/comp_tasks/_core.py | 8 +-
 .../test_api_rpc_computations.py | 4 +-
 .../api/v0/openapi.yaml | 14 +
 .../director_v2/_computations_service.py | 72 ++++-
 .../_computations_service_utils.py | 247 ------------------
 .../_controller/computations_rest.py | 18 +-
 .../projects/_metadata_repository.py | 52 ++++
 .../projects/_metadata_service.py | 8 +
 .../projects/projects_metadata_service.py | 10 +-
 17 files changed, 271 insertions(+), 299 deletions(-)
 create mode 100644 packages/models-library/src/models_library/projects_metadata.py
 create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py
 delete mode 100644 services/web/server/src/simcore_service_webserver/director_v2/_computations_service_utils.py

diff --git a/api/specs/web-server/_computations.py b/api/specs/web-server/_computations.py
index b40c2550a16..fb60fce4175 100644
--- a/api/specs/web-server/_computations.py
+++ b/api/specs/web-server/_computations.py
@@ -6,9 +6,10 @@ from models_library.api_schemas_webserver.computations import (
     ComputationGet,
     ComputationPathParams,
+    ComputationRunIterationsLatestListQueryParams,
+    ComputationRunIterationsListQueryParams,
     ComputationRunPathParams,
     ComputationRunRestGet,
-    ComputationRunWithFiltersListQueryParams,
     ComputationStart,
     ComputationStarted,
     ComputationTaskRestGet,
@@ -16,7 +17,6 @@ from models_library.generics import Envelope
 from simcore_service_webserver._meta import API_VTAG
 from simcore_service_webserver.director_v2._controller.computations_rest import (
-    ComputationRunListQueryParams,
     ComputationTaskListQueryParams,
     ComputationTaskPathParams,
 )
@@ -71,7 +71,9 @@ async def stop_computation(_path: Annotated[ComputationPathParams, Depends()]):
     response_model=Page[ComputationRunRestGet],
 )
 async def list_computations_latest_iteration(
-    _query: Annotated[as_query(ComputationRunWithFiltersListQueryParams), Depends()],
+    _query: Annotated[
+        as_query(ComputationRunIterationsLatestListQueryParams), Depends()
+    ],
 ): ...
@@ -80,7 +82,7 @@ async def list_computation_iterations(
     response_model=Page[ComputationRunRestGet],
 )
 async def list_computation_iterations(
-    _query: Annotated[as_query(ComputationRunListQueryParams), Depends()],
+    _query: Annotated[as_query(ComputationRunIterationsListQueryParams), Depends()],
     _path: Annotated[ComputationRunPathParams, Depends()],
 ): ...
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/computations.py b/packages/models-library/src/models_library/api_schemas_webserver/computations.py
index a1f955b20fe..a3de7285621 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/computations.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/computations.py
@@ -84,13 +84,20 @@ class ComputationRunListQueryParams(
 ): ...


-class ComputationRunWithFiltersListQueryParams(ComputationRunListQueryParams):
+class ComputationRunIterationsLatestListQueryParams(ComputationRunListQueryParams):
     filter_only_running: bool = Field(
         default=False,
         description="If true, only running computations are returned",
     )


+class ComputationRunIterationsListQueryParams(ComputationRunListQueryParams):
+    include_children: bool = Field(
+        default=False,
+        description="If true, all tasks of the project and its children are returned (Currently supported only for root projects)",
+    )
+
+
 class ComputationRunRestGet(OutputSchema):
     project_uuid: ProjectID
     iteration: int
@@ -128,7 +135,11 @@ class ComputationTaskPathParams(BaseModel):
 class ComputationTaskListQueryParams(
     PageQueryParameters,
     ComputationTaskListOrderParams,  # type: ignore[misc, valid-type]
-): ...
+):
+    include_children: bool = Field(
+        default=False,
+        description="If true, all tasks of the project and its children are returned (Currently supported only for root projects)",
+    )


 class ComputationTaskRestGet(OutputSchema):
diff --git a/packages/models-library/src/models_library/projects_metadata.py b/packages/models-library/src/models_library/projects_metadata.py
new file mode 100644
index 00000000000..0290ff03d0d
--- /dev/null
+++ b/packages/models-library/src/models_library/projects_metadata.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import Any
+
+from pydantic import BaseModel
+
+from .projects import ProjectID
+
+
+class ProjectsMetadataDBGet(BaseModel):
+    project_uuid: ProjectID
+    custom: dict[str, Any]
+    created: datetime
+    modified: datetime
+    parent_project_uuid: ProjectID
+    parent_node_id: ProjectID
+    root_parent_project_uuid: ProjectID
+    root_parent_node_id: ProjectID
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py
new file mode 100644
index 00000000000..75ce061eddc
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py
@@ -0,0 +1,34 @@
+"""add index to projects_metadata
+
+Revision ID: 4e7d8719855b
+Revises: e98c45ff314f
+Create Date: 2025-05-21 11:48:34.062860+00:00
+
+"""
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "4e7d8719855b"
+down_revision = "e98c45ff314f"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_index(
+        "idx_projects_metadata_root_parent_project_uuid",
+        "projects_metadata",
+        ["root_parent_project_uuid"],
+        unique=False,
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(
+        "idx_projects_metadata_root_parent_project_uuid", table_name="projects_metadata"
+    )
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/models/projects_metadata.py
index c5379f407d4..3e6ed034a96 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/projects_metadata.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/projects_metadata.py
@@ -1,6 +1,6 @@
 """
-    These tables were designed to be controled by projects-plugin in
-    the webserver's service
+These tables were designed to be controled by projects-plugin in
+the webserver's service
 """

 import sqlalchemy as sa
@@ -100,6 +100,10 @@
         ondelete=RefActions.SET_NULL,
         name="fk_projects_metadata_root_parent_node_id",
     ),
+    #######
+    sa.Index(
+        "idx_projects_metadata_root_parent_project_uuid", "root_parent_project_uuid"
+    ),
 )
diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations.py
index 24720ecd512..a24ed19aba9 100644
--- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations.py
+++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations.py
@@ -64,7 +64,7 @@ async def list_computations_iterations_page(
     *,
     product_name: ProductName,
     user_id: UserID,
-    project_id: ProjectID,
+    project_ids: list[ProjectID],
     # pagination
     offset: int = 0,
     limit: int = 20,
@@ -76,7 +76,7 @@ async def list_computations_iterations_page(
         _RPC_METHOD_NAME_ADAPTER.validate_python("list_computations_iterations_page"),
         product_name=product_name,
         user_id=user_id,
-        project_id=project_id,
+        project_ids=project_ids,
         offset=offset,
         limit=limit,
         order_by=order_by,
@@ -92,7 +92,7 @@ async def list_computations_latest_iteration_tasks_page(
     *,
     product_name: ProductName,
     user_id: UserID,
-    project_id: ProjectID,
+    project_ids: list[ProjectID],
     # pagination
     offset: int = 0,
     limit: int = 20,
@@ -106,7 +106,7 @@ async def list_computations_latest_iteration_tasks_page(
         ),
         product_name=product_name,
         user_id=user_id,
-        project_id=project_id,
+        project_ids=project_ids,
         offset=offset,
         limit=limit,
         order_by=order_by,
diff --git a/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py b/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py
index d0caa15a869..183f2920387 100644
--- a/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py
+++ b/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py
@@ -59,7 +59,7 @@ async def list_computations_iterations_page(
     *,
     product_name: ProductName,
     user_id: UserID,
-    project_id: ProjectID,
+    project_ids: list[ProjectID],
     # pagination
     offset: int = 0,
     limit: int = 20,
@@ -71,7 +71,7 @@ async def list_computations_iterations_page(
         await comp_runs_repo.list_for_user_and_project_all_iterations(
             product_name=product_name,
             user_id=user_id,
-            project_id=project_id,
+            project_ids=project_ids,
             offset=offset,
             limit=limit,
             order_by=order_by,
@@ -84,12 +84,12 @@


 async def _fetch_task_log(
-    user_id: UserID, project_id: ProjectID, task: ComputationTaskForRpcDBGet
+    user_id: UserID, task: ComputationTaskForRpcDBGet
 ) -> TaskLogFileGet | None:
     if not task.state.is_running():
         return await dask_utils.get_task_log_file(
             user_id=user_id,
-            project_id=project_id,
+            project_id=task.project_uuid,
             node_id=task.node_id,
         )
     return None
@@ -101,7 +101,7 @@ async def list_computations_latest_iteration_tasks_page(
     *,
     product_name: ProductName,
     user_id: UserID,
-    project_id: ProjectID,
+    project_ids: list[ProjectID],
     # pagination
     offset: int = 0,
     limit: int = 20,
@@ -114,20 +114,30 @@
     comp_tasks_repo = CompTasksRepository.instance(db_engine=app.state.engine)
     comp_runs_repo = CompRunsRepository.instance(db_engine=app.state.engine)

-    comp_latest_run = await comp_runs_repo.get(
-        user_id=user_id, project_id=project_id, iteration=None  # Returns last iteration
-    )
-
     total, comp_tasks = await comp_tasks_repo.list_computational_tasks_rpc_domain(
-        project_id=project_id,
+        project_ids=project_ids,
         offset=offset,
         limit=limit,
         order_by=order_by,
     )
+    # Get unique set of all project_uuids from comp_tasks
+    unique_project_uuids = {task.project_uuid for task in comp_tasks}
+
+    # Fetch latest run for each project concurrently
+    latest_runs = await limited_gather(
+        *[
+            comp_runs_repo.get(user_id=user_id, project_id=project_uuid, iteration=None)
+            for project_uuid in unique_project_uuids
+        ],
+        limit=20,
+    )
+    # Build a dict: project_uuid -> iteration
+    project_uuid_to_iteration = {run.project_uuid: run.iteration for run in latest_runs}
+
     # Run all log fetches concurrently
     log_files = await limited_gather(
-        *[_fetch_task_log(user_id, project_id, task) for task in comp_tasks],
+        *[_fetch_task_log(user_id, task) for task in comp_tasks],
         limit=20,
     )
@@ -142,7 +152,10 @@
             ended_at=task.ended_at,
             log_download_link=log_file.download_link if log_file else None,
             service_run_id=ServiceRunID.get_resource_tracking_run_id_for_computational(
-                user_id, project_id, task.node_id, comp_latest_run.iteration
+                user_id,
+                task.project_uuid,
+                task.node_id,
+                project_uuid_to_iteration[task.project_uuid],
             ),
         )
         for task, log_file in zip(comp_tasks, log_files, strict=True)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py
index eb706af4b83..511b53fe1c0 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py
@@ -295,7 +295,7 @@ async def list_for_user_and_project_all_iterations(
         *,
         product_name: str,
         user_id: UserID,
-        project_id: ProjectID,
+        project_ids: list[ProjectID],
         # pagination
         offset: int,
         limit: int,
@@ -309,7 +309,11 @@ async def list_for_user_and_project_all_iterations(
                 *self._COMPUTATION_RUNS_RPC_GET_COLUMNS,
             ).where(
                 (comp_runs.c.user_id == user_id)
-                & (comp_runs.c.project_uuid == f"{project_id}")
+                & (
+                    comp_runs.c.project_uuid.in_(
+                        [f"{project_id}" for project_id in project_ids]
+                    )
+                )
                 & (
                     comp_runs.c.metadata["product_name"].astext == product_name
                 )  # <-- NOTE: We might create a separate column for this for fast retrieval
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py
index 8c32f978e2d..67f19db7e0e 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py
@@ -78,7 +78,7 @@ async def list_computational_tasks(
     async def list_computational_tasks_rpc_domain(
         self,
         *,
-        project_id: ProjectID,
+        project_ids: list[ProjectID],
         # pagination
         offset: int = 0,
         limit: int = 20,
@@ -100,7 +100,11 @@ async def list_computational_tasks_rpc_domain(
             )
             .select_from(comp_tasks)
             .where(
-                (comp_tasks.c.project_id == f"{project_id}")
+                (
+                    comp_tasks.c.project_id.in_(
+                        [f"{project_id}" for project_id in project_ids]
+                    )
+                )
                 & (comp_tasks.c.node_class == NodeClass.COMPUTATIONAL)
             )
         )
diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations.py
index 54c91a752eb..910679901e3 100644
--- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations.py
+++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations.py
@@ -97,7 +97,7 @@ async def test_rpc_list_computation_runs_and_tasks(

     # Tasks
     output = await rpc_computations.list_computations_latest_iteration_tasks_page(
-        rpc_client, product_name="osparc", user_id=user["id"], project_id=proj.uuid
+        rpc_client, product_name="osparc", user_id=user["id"], project_ids=[proj.uuid]
     )
     assert output
     assert output.total == 4
@@ -201,7 +201,7 @@ async def test_rpc_list_computation_runs_history(
     )

     output = await rpc_computations.list_computations_iterations_page(
-        rpc_client, product_name="osparc", user_id=user["id"], project_id=proj.uuid
+        rpc_client, product_name="osparc", user_id=user["id"], project_ids=[proj.uuid]
     )
     assert output.total == 3
     assert isinstance(output, ComputationRunRpcGetPage)
diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
index 4b461f1e9e8..e60f7169086 100644
--- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
+++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
@@ -2619,6 +2619,13 @@ paths:
           type: integer
           default: 0
           title: Offset
+      - name: include_children
+        in: query
+        required: false
+        schema:
+          type: boolean
+          default: false
+          title: Include Children
       responses:
         '200':
           description: Successful Response
@@ -2664,6 +2671,13 @@ paths:
           type: integer
           default: 0
           title: Offset
+      - name: include_children
+        in: query
+        required: false
+        schema:
+          type: boolean
+          default: false
+          title: Include Children
       responses:
         '200':
           description: Successful Response
diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py b/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py
index fac04998d1d..d5ca46fbc83 100644
--- a/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py
+++ b/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py
@@ -32,6 +32,7 @@
 )
 from ..projects.projects_metadata_service import (
     get_project_custom_metadata_or_empty_dict,
+    get_project_uuids_by_root_parent_project_id,
 )
 from ..rabbitmq import get_rabbitmq_rpc_client

@@ -120,9 +121,12 @@ async def list_computations_latest_iteration(

 async def list_computation_iterations(
     app: web.Application,
+    *,
     product_name: ProductName,
     user_id: UserID,
     project_id: ProjectID,
+    # filters
+    include_children: bool = False,
     # pagination
     offset: int,
     limit: NonNegativeInt,
@@ -130,23 +134,37 @@ async def list_computation_iterations(
     order_by: OrderBy,
 ) -> tuple[int, list[ComputationRunWithAttributes]]:
     """Returns the list of computations for a specific project (all iterations)"""
+    await check_user_project_permission(
+        app, project_id=project_id, user_id=user_id, product_name=product_name
+    )
+
+    if include_children:
+        child_projects = await get_project_uuids_by_root_parent_project_id(
+            app, root_parent_project_uuid=project_id
+        )
+        child_projects_with_root = [*child_projects, project_id]
+    else:
+        child_projects_with_root = [project_id]
+
     rpc_client = get_rabbitmq_rpc_client(app)
     _runs_get = await computations.list_computations_iterations_page(
         rpc_client,
         product_name=product_name,
         user_id=user_id,
-        project_id=project_id,
+        project_ids=child_projects_with_root,
         offset=offset,
         limit=limit,
         order_by=order_by,
     )
+
+    # NOTE: MD: can be improved, many times we ask for the same project
     # Get projects metadata
-    _projects_metadata = await _get_projects_metadata(app, project_uuids=[project_id])
-    assert len(_projects_metadata) == 1  # nosec
+    _projects_metadata = await _get_projects_metadata(
+        app, project_uuids=[item.project_uuid for item in _runs_get.items]
+    )
     # Get Root project names
-    _projects_root_names = await _get_root_project_names(app, [_runs_get.items[0]])
-    assert len(_projects_root_names) == 1  # nosec
+    root_project_names = await batch_get_project_name(app, projects_uuids=[project_id])
+    assert len(root_project_names) == 1

     _computational_runs_output = [
         ComputationRunWithAttributes(
@@ -157,10 +175,12 @@ async def list_computation_iterations(
             submitted_at=item.submitted_at,
             started_at=item.started_at,
             ended_at=item.ended_at,
-            root_project_name=_projects_root_names[0],
-            project_custom_metadata=_projects_metadata[0],
+            root_project_name=root_project_names[0],
+            project_custom_metadata=project_metadata,
+        )
+        for item, project_metadata in zip(
+            _runs_get.items, _projects_metadata, strict=True
         )
-        for item in _runs_get.items
     ]

     return _runs_get.total, _computational_runs_output
@@ -181,9 +201,12 @@ async def _get_credits_or_zero_by_service_run_id(

 async def list_computations_latest_iteration_tasks(
     app: web.Application,
+    *,
     product_name: ProductName,
     user_id: UserID,
     project_id: ProjectID,
+    # filters
+    include_children: bool = False,
     # pagination
     offset: int,
     limit: NonNegativeInt,
@@ -191,25 +214,44 @@ async def list_computations_latest_iteration_tasks(
     order_by: OrderBy,
 ) -> tuple[int, list[ComputationTaskWithAttributes]]:
     """Returns the list of tasks for the latest iteration of a computation"""
-
     await check_user_project_permission(
         app, project_id=project_id, user_id=user_id, product_name=product_name
     )

+    if include_children:
+        child_projects = await get_project_uuids_by_root_parent_project_id(
+            app, root_parent_project_uuid=project_id
+        )
+        child_projects_with_root = [*child_projects, project_id]
+    else:
+        child_projects_with_root = [project_id]
+
     rpc_client = get_rabbitmq_rpc_client(app)
     _tasks_get = await computations.list_computations_latest_iteration_tasks_page(
         rpc_client,
         product_name=product_name,
         user_id=user_id,
-        project_id=project_id,
+        project_ids=child_projects_with_root,
         offset=offset,
         limit=limit,
         order_by=order_by,
     )

-    # Get node names (for all project nodes)
-    project_dict = await get_project_dict_legacy(app, project_uuid=project_id)
-    workbench = project_dict["workbench"]
+    # Get unique set of all project_uuids from comp_tasks
+    unique_project_uuids = {task.project_uuid for task in _tasks_get.items}
+    # Fetch projects metadata concurrently
+    # NOTE: MD: can be improved with a single batch call
+    project_dicts = await limited_gather(
+        *[
+            get_project_dict_legacy(app, project_uuid=project_uuid)
+            for project_uuid in unique_project_uuids
+        ],
+        limit=20,
+    )
+    # Build a dict: project_uuid -> workbench
+    project_uuid_to_workbench = {
+        prj["project_uuid"]: prj["workbench"] for prj in project_dicts
+    }

     _service_run_ids = [item.service_run_id for item in _tasks_get.items]
     _is_product_billable = await is_product_billable(app, product_name=product_name)
@@ -239,7 +281,9 @@
             started_at=item.started_at,
             ended_at=item.ended_at,
             log_download_link=item.log_download_link,
-            node_name=workbench[f"{item.node_id}"].get("label", ""),
+            node_name=project_uuid_to_workbench[f"{item.project_uuid}"]["workbench"][
+                f"{item.node_id}"
+            ].get("label", ""),
             osparc_credits=credits_or_none,
         )
         for item, credits_or_none in zip(
diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_computations_service_utils.py b/services/web/server/src/simcore_service_webserver/director_v2/_computations_service_utils.py
deleted file mode 100644
index f05430a42a8..00000000000
--- a/services/web/server/src/simcore_service_webserver/director_v2/_computations_service_utils.py
+++ /dev/null
@@ -1,247 +0,0 @@
-from decimal import Decimal
-
-from aiohttp import web
-from models_library.computations import (
-    ComputationRunWithAttributes,
-    ComputationTaskWithAttributes,
-)
-from models_library.products import ProductName
-from models_library.projects import ProjectID
-from models_library.rest_ordering import OrderBy
-from models_library.users import UserID
-from pydantic import NonNegativeInt
-from servicelib.rabbitmq.rpc_interfaces.director_v2 import computations
-from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker import (
-    credit_transactions,
-)
-from servicelib.utils import limited_gather
-
-from ..products.products_service import is_product_billable
-from ..projects.api import (
-    batch_get_project_name,
-    check_user_project_permission,
-    get_project_dict_legacy,
-)
-from ..projects.projects_metadata_service import (
-    get_project_custom_metadata_or_empty_dict,
-)
-from ..rabbitmq import get_rabbitmq_rpc_client
-
-
-async def _fetch_projects_metadata(
-    app: web.Application,
-    project_uuids: list[ProjectID],  # Using str instead of ProjectID for compatibility
-) -> list[dict]:
-    """Batch fetch project metadata with concurrency control"""
-    return await limited_gather(
-        *[
-            get_project_custom_metadata_or_empty_dict(app, project_uuid=uuid)
-            for uuid in project_uuids
-        ],
-        limit=20,
-    )
-
-
-async def list_computations_latest_iteration(
-    app: web.Application,
-    product_name: ProductName,
-    user_id: UserID,
-    # filters
-    filter_only_running: bool,  # noqa: FBT001
-    # pagination
-    offset: int,
-    limit: NonNegativeInt,
-    # ordering
-    order_by: OrderBy,
-) -> tuple[int, list[ComputationRunWithAttributes]]:
-    """Returns the list of computations (only latest iterations)"""
-    rpc_client = get_rabbitmq_rpc_client(app)
-    _runs_get = await computations.list_computations_latest_iteration_page(
-        rpc_client,
-        product_name=product_name,
-        user_id=user_id,
-        filter_only_running=filter_only_running,
-        offset=offset,
-        limit=limit,
-        order_by=order_by,
-    )
-
-    # Get projects metadata (NOTE: MD: can be improved with a single batch call)
-    _projects_metadata = await _fetch_projects_metadata(
-        app, project_uuids=[item.project_uuid for item in _runs_get.items]
-    )
-
-    # Get Root project names
-    _projects_root_uuids: list[ProjectID] = []
-    for item in _runs_get.items:
-        if (
-            prj_root_id := item.info.get("project_metadata", {}).get(
-                "root_parent_project_id", None
-            )
-        ) is not None:
-            _projects_root_uuids.append(ProjectID(prj_root_id))
-        else:
-            _projects_root_uuids.append(item.project_uuid)
-
-    _projects_root_names = await batch_get_project_name(
-        app, projects_uuids=_projects_root_uuids
-    )
-
-    _computational_runs_output = [
-        ComputationRunWithAttributes(
-            project_uuid=item.project_uuid,
-            iteration=item.iteration,
-            state=item.state,
-            info=item.info,
-            submitted_at=item.submitted_at,
-            started_at=item.started_at,
-            ended_at=item.ended_at,
-            root_project_name=project_name,
-            project_custom_metadata=project_metadata,
-        )
-        for item, project_metadata, project_name in zip(
-            _runs_get.items, _projects_metadata, _projects_root_names, strict=True
-        )
-    ]
-
-    return _runs_get.total, _computational_runs_output
-
-
-async def list_computation_iterations(
-    app: web.Application,
-    product_name: ProductName,
-    user_id: UserID,
-    project_id: ProjectID,
-    # pagination
-    offset: int,
-    limit: NonNegativeInt,
-    # ordering
-    order_by: OrderBy,
-) -> tuple[int, list[ComputationRunWithAttributes]]:
-    """Returns the list of computations (only latest iterations)"""
-    rpc_client = get_rabbitmq_rpc_client(app)
-    _runs_get = await computations.list_computations_iterations_page(
-        rpc_client,
-        product_name=product_name,
-        user_id=user_id,
-        project_id=project_id,
-        offset=offset,
-        limit=limit,
-        order_by=order_by,
-    )
-
-    # Get projects metadata (NOTE: MD: can be improved with a single batch call)
-    _projects_metadata = await limited_gather(
-        *[
-            get_project_custom_metadata_or_empty_dict(
-                app, project_uuid=item.project_uuid
-            )
-            for item in _runs_get.items
-        ],
-        limit=20,
-    )
-
-    # Get Root project names
-    _projects_root_uuids: list[ProjectID] = []
-    for item in _runs_get.items:
-        if (
-            prj_root_id := item.info.get("project_metadata", {}).get(
-                "root_parent_project_id", None
-            )
-        ) is not None:
-            _projects_root_uuids.append(ProjectID(prj_root_id))
-        else:
-            _projects_root_uuids.append(item.project_uuid)
-
-    _projects_root_names = await batch_get_project_name(
-        app, projects_uuids=_projects_root_uuids
-    )
-
-    _computational_runs_output = [
-        ComputationRunWithAttributes(
-            project_uuid=item.project_uuid,
-            iteration=item.iteration,
-            state=item.state,
-            info=item.info,
-            submitted_at=item.submitted_at,
-            started_at=item.started_at,
-            ended_at=item.ended_at,
-            root_project_name=project_name,
-            project_custom_metadata=project_metadata,
-        )
-        for item, project_metadata, project_name in zip(
-            _runs_get.items, _projects_metadata, _projects_root_names, strict=True
-        )
-    ]
-
-    return _runs_get.total, _computational_runs_output
-
-
-async def list_computations_latest_iteration_tasks(
-    app: web.Application,
-    product_name: ProductName,
-    user_id: UserID,
-    project_id: ProjectID,
-    # pagination
-    offset: int,
-    limit: NonNegativeInt,
-    # ordering
-    order_by: OrderBy,
-) -> tuple[int, list[ComputationTaskWithAttributes]]:
-    """Returns the list of tasks for the latest iteration of a computation"""
-
-    await check_user_project_permission(
-        app, project_id=project_id, user_id=user_id, product_name=product_name
-    )
-
-    rpc_client = get_rabbitmq_rpc_client(app)
-    _tasks_get = await computations.list_computations_latest_iteration_tasks_page(
-        rpc_client,
-        product_name=product_name,
-        user_id=user_id,
-        project_id=project_id,
-        offset=offset,
-        limit=limit,
-        order_by=order_by,
-    )
-
-    # Get node names (for all project nodes)
-    project_dict = await get_project_dict_legacy(app, project_uuid=project_id)
-    workbench = project_dict["workbench"]
-
-    _service_run_ids = [item.service_run_id for item in _tasks_get.items]
-    _is_product_billable = await is_product_billable(app, product_name=product_name)
-    _service_run_osparc_credits: list[Decimal | None]
-    if _is_product_billable:
-        # NOTE: MD: can be improved with a single batch call
-        _service_run_osparc_credits = await limited_gather(
-            *[
-                credit_transactions.get_transaction_current_credits_by_service_run_id(
-                    rpc_client, service_run_id=_run_id
-                )
-                for _run_id in _service_run_ids
-            ],
-            limit=20,
-        )
-    else:
-        _service_run_osparc_credits = [None for _ in _service_run_ids]
-
-    # Final output
-    _tasks_get_output = [
-        ComputationTaskWithAttributes(
-            project_uuid=item.project_uuid,
-            node_id=item.node_id,
-            state=item.state,
-            progress=item.progress,
-            image=item.image,
-            started_at=item.started_at,
-            ended_at=item.ended_at,
-            log_download_link=item.log_download_link,
-            node_name=workbench[f"{item.node_id}"].get("label", ""),
-            osparc_credits=credits_or_none,
-        )
-        for item, credits_or_none in zip(
-            _tasks_get.items, _service_run_osparc_credits, strict=True
-        )
-    ]
-    return _tasks_get.total, _tasks_get_output
diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_controller/computations_rest.py b/services/web/server/src/simcore_service_webserver/director_v2/_controller/computations_rest.py
index 3f13721b34a..96766fe97af 100644
--- a/services/web/server/src/simcore_service_webserver/director_v2/_controller/computations_rest.py
+++ b/services/web/server/src/simcore_service_webserver/director_v2/_controller/computations_rest.py
@@ -2,10 +2,10 @@

 from aiohttp import web
 from models_library.api_schemas_webserver.computations import (
-    ComputationRunListQueryParams,
+    ComputationRunIterationsLatestListQueryParams,
+    ComputationRunIterationsListQueryParams,
     ComputationRunPathParams,
     ComputationRunRestGet,
-    ComputationRunWithFiltersListQueryParams,
     ComputationTaskListQueryParams,
     ComputationTaskPathParams,
     ComputationTaskRestGet,
@@ -51,9 +51,9 @@ class ComputationsRequestContext(RequestParameters):
 async def list_computations_latest_iteration(request: web.Request) -> web.Response:
     req_ctx = ComputationsRequestContext.model_validate(request)

-    query_params: ComputationRunWithFiltersListQueryParams = (
+    query_params: ComputationRunIterationsLatestListQueryParams = (
         parse_request_query_parameters_as(
-            ComputationRunWithFiltersListQueryParams, request
+            ComputationRunIterationsLatestListQueryParams, request
         )
     )

@@ -99,8 +99,10 @@ async def list_computations_latest_iteration(request: web.Request) -> web.Respon
 async def list_computation_iterations(request: web.Request) -> web.Response:
     req_ctx = ComputationsRequestContext.model_validate(request)

-    query_params: ComputationRunListQueryParams = parse_request_query_parameters_as(
-        ComputationRunListQueryParams, request
+    query_params: ComputationRunIterationsListQueryParams = (
+        parse_request_query_parameters_as(
+            ComputationRunIterationsListQueryParams, request
+        )
     )
     path_params = parse_request_path_parameters_as(ComputationRunPathParams, request)

@@ -109,6 +111,8 @@ async def list_computation_iterations(request: web.Request) -> web.Response:
         product_name=req_ctx.product_name,
         user_id=req_ctx.user_id,
         project_id=path_params.project_id,
+        # filters
+        include_children=query_params.include_children,
         # pagination
         offset=query_params.offset,
         limit=query_params.limit,
@@ -157,6 +161,8 @@ async def list_computations_latest_iteration_tasks(
         product_name=req_ctx.product_name,
         user_id=req_ctx.user_id,
         project_id=path_params.project_id,
+        # filters
+        include_children=query_params.include_children,
         # pagination
         offset=query_params.offset,
         limit=query_params.limit,
diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py
index 2c72a395a5a..97c0f95a493 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py
@@ -2,12 +2,16 @@
 from collections.abc import Awaitable, Callable
 from typing import Any, TypeVar

+import sqlalchemy as sa
+from aiohttp import web
 from aiopg.sa.engine import Engine
 from models_library.api_schemas_webserver.projects_metadata import MetadataDict
 from models_library.projects import ProjectID
+from models_library.projects_metadata import ProjectsMetadataDBGet
 from models_library.projects_nodes_io import NodeID
 from pydantic import TypeAdapter
 from simcore_postgres_database import utils_projects_metadata
+from simcore_postgres_database.models.projects_metadata import projects_metadata
 from simcore_postgres_database.utils_projects_metadata import (
     DBProjectInvalidAncestorsError,
     DBProjectInvalidParentNodeError,
@@ -19,7 +23,13 @@
     ProjectNodesNonUniqueNodeFoundError,
     ProjectNodesRepo,
 )
+from simcore_postgres_database.utils_repos import (
+    get_columns_from_db_model,
+    pass_or_acquire_connection,
+)
+from sqlalchemy.ext.asyncio import AsyncConnection

+from ..db.plugin import get_asyncpg_engine
 from .exceptions import (
     NodeNotFoundError,
     ParentNodeNotFoundError,
@@ -31,6 +41,12 @@
 F = TypeVar("F", bound=Callable[..., Awaitable[Any]])


+PROJECT_METADATA_DB_COLS = get_columns_from_db_model(
+    projects_metadata,
+    ProjectsMetadataDBGet,
+)
+
+
 def _handle_projects_metadata_exceptions(fct: F) -> F:
     """Transforms project errors -> http errors"""

@@ -145,3 +161,39 @@ async def set_project_ancestors(
         parent_project_uuid=parent_project_uuid,
         parent_node_id=parent_node_id,
     )
+
+
+async def get(
+    app: web.Application,
+    connection: AsyncConnection | None = None,
+    *,
+    project_uuid: ProjectID,
+) -> ProjectsMetadataDBGet:
+    query = sa.select(*PROJECT_METADATA_DB_COLS).where(
+        projects_metadata.c.project_uuid == f"{project_uuid}"
+    )
+
+    async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+        result = await conn.execute(query)
+        row = result.one_or_none()
+        if row is None:
+            raise ProjectNotFoundError(project_uuid=project_uuid)
+        return ProjectsMetadataDBGet.model_validate(row)
+
+
+async def get_project_uuids_by_root_parent_project_id(
+    app: web.Application,
+    connection: AsyncConnection | None = None,
+    *,
+    root_parent_project_uuid: ProjectID,
+) -> list[ProjectID]:
+    stmt = sa.select(projects_metadata.c.project_uuid).where(
+        projects_metadata.c.root_parent_project_uuid == f"{root_parent_project_uuid}"
+    )
+
+    async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+        result = await conn.stream(stmt)
+        output: list[ProjectID] = [
+            ProjectID(row["project_uuid"]) async for row in result
+        ]
+        return output
diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py
index 8c6efe8d808..5d890a6c374 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py
@@ -116,3 +116,11 @@ async def set_project_ancestors(
         parent_project_uuid=parent_project_uuid,
         parent_node_id=parent_node_id,
     )
+
+
+async def get_project_uuids_by_root_parent_project_id(
+    app: web.Application, root_parent_project_uuid: ProjectID
+) -> list[ProjectID]:
+    return await _metadata_repository.get_project_uuids_by_root_parent_project_id(
+        app=app, root_parent_project_uuid=root_parent_project_uuid
+    )
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_metadata_service.py b/services/web/server/src/simcore_service_webserver/projects/projects_metadata_service.py
index 84934a4edd3..6a82d53ae49 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_metadata_service.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_metadata_service.py
@@ -1,6 +1,12 @@
-from ._metadata_service import get_project_custom_metadata_or_empty_dict
+from ._metadata_service import (
+    get_project_custom_metadata_or_empty_dict,
+    get_project_uuids_by_root_parent_project_id,
+)

-__all__: tuple[str, ...] = ("get_project_custom_metadata_or_empty_dict",)
+__all__: tuple[str, ...] = (
+    "get_project_custom_metadata_or_empty_dict",
+    "get_project_uuids_by_root_parent_project_id",
+)

 # nopycln: file

From 18058451e3ce7d3e4e332522e1624f6e35344a75 Mon Sep 17 00:00:00 2001
From: matusdrobuliak66
Date: Wed, 21 May 2025 14:09:16 +0200
Subject: [PATCH 2/9] revision id

---
 .../versions/4e7d8719855b_add_index_to_projects_metadata.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py
index 75ce061eddc..cadbdb5e240 100644
--- a/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py
@@ -1,7 +1,7 @@
 """add index to projects_metadata

 Revision ID: 4e7d8719855b
-Revises: e98c45ff314f
+Revises: b39f2dc87ccd
 Create Date: 2025-05-21 11:48:34.062860+00:00

 """
@@ -10,7 +10,7 @@

 # revision identifiers, used by Alembic.
 revision = "4e7d8719855b"
-down_revision = "e98c45ff314f"
+down_revision = "b39f2dc87ccd"
 branch_labels = None
 depends_on = None

From 2d490ead8d0461c48823bf5fd932c23152186aee Mon Sep 17 00:00:00 2001
From: matusdrobuliak66
Date: Wed, 21 May 2025 15:55:00 +0200
Subject: [PATCH 3/9] fix

---
 .../models_library/api_schemas_webserver/computations.py | 2 +-
 .../rpc_interfaces/resource_usage_tracker/errors.py | 3 ++-
 .../director/src/simcore_service_director/registry_proxy.py | 2 +-
 .../services/modules/db/credit_transactions_db.py | 2 +-
 .../director_v2/_computations_service.py | 6 ++----
 5 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/packages/models-library/src/models_library/api_schemas_webserver/computations.py b/packages/models-library/src/models_library/api_schemas_webserver/computations.py
index a3de7285621..0cd3d993b6d 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/computations.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/computations.py
@@ -94,7 +94,7 @@ class ComputationRunIterationsLatestListQueryParams(ComputationRunListQueryParam
 class ComputationRunIterationsListQueryParams(ComputationRunListQueryParams):
     include_children: bool = Field(
         default=False,
-        description="If true, all tasks of the project and its children are returned (Currently supported only for root projects)",
+        description="If true, all computational runs of the project and its children are returned (Currently supported only for root projects)",
     )
diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py
index 8729c77462d..ab11d42deeb 100644
--- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py
+++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py
@@ -35,7 +35,8 @@ class WalletTransactionError(OsparcErrorMixin, Exception):
     msg_template = "{msg}"


-class CreditTransactionNotFoundError(OsparcErrorMixin, Exception): ...
+class CreditTransactionNotFoundError(OsparcErrorMixin, Exception):
+    msg_template = "Credit transaction for service run id {service_run_id} not found."


 ### Pricing Plans Error
diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py
index 56b5d812f8c..e577be686ec 100644
--- a/services/director/src/simcore_service_director/registry_proxy.py
+++ b/services/director/src/simcore_service_director/registry_proxy.py
@@ -226,7 +226,7 @@ async def _setup_registry(app: FastAPI) -> None:
         reraise=True,
     )
     async def _wait_until_registry_responsive(app: FastAPI) -> None:
-        await _basic_auth_registry_request(app, path="", method="HEAD", timeout=1.0)
+        await _basic_auth_registry_request(app, path="", method="HEAD", timeout=10.0)

     with log_context(_logger, logging.INFO, msg="Connecting to docker registry"):
         await _wait_until_registry_responsive(app)
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py
index f28ab1c54de..abbdf976f80 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py
@@ -229,5 +229,5 @@ async def get_transaction_current_credits_by_service_run_id(
         result = await conn.execute(select_stmt)
         row = result.first()
         if row is None:
-            raise CreditTransactionNotFoundError
+            raise CreditTransactionNotFoundError(service_run_id=service_run_id)
         return Decimal(row[0])
diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py b/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py
index d5ca46fbc83..22667ded1c8 100644
--- a/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py
+++ b/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py
@@ -249,9 +249,7 @@ async def list_computations_latest_iteration_tasks(
         limit=20,
     )
     # Build a dict: project_uuid -> workbench
-    project_uuid_to_workbench = {
-        prj["project_uuid"]: prj["workbench"] for prj in project_dicts
-    }
+    project_uuid_to_workbench = {prj["uuid"]: prj["workbench"] for prj in project_dicts}

     _service_run_ids = [item.service_run_id for item in _tasks_get.items]
     _is_product_billable = await is_product_billable(app, product_name=product_name)
@@ -279,7 +279,7 @@ async def list_computations_latest_iteration_tasks(
             started_at=item.started_at,
             ended_at=item.ended_at,
             log_download_link=item.log_download_link,
-            node_name=project_uuid_to_workbench[f"{item.project_uuid}"]["workbench"][
+            node_name=project_uuid_to_workbench[f"{item.project_uuid}"][
                 f"{item.node_id}"
             ].get("label", ""),
             osparc_credits=credits_or_none,

From 3d131ceaed82db679b7f69876312726f485f9c72 Mon Sep 17 00:00:00 2001
From: matusdrobuliak66
Date: Wed, 21 May 2025 15:59:56 +0200
Subject: [PATCH 4/9] revert:

---
 .../director/src/simcore_service_director/registry_proxy.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py
index e577be686ec..56b5d812f8c 100644
--- a/services/director/src/simcore_service_director/registry_proxy.py
+++ b/services/director/src/simcore_service_director/registry_proxy.py
@@ -226,7 +226,7 @@ async def _setup_registry(app: FastAPI) -> None:
         reraise=True,
     )
     async def _wait_until_registry_responsive(app: FastAPI) -> None:
-        await _basic_auth_registry_request(app, path="", method="HEAD", timeout=10.0)
+        await _basic_auth_registry_request(app, path="", method="HEAD", timeout=1.0)

     with log_context(_logger, logging.INFO, msg="Connecting to docker registry"):
         await _wait_until_registry_responsive(app)

From bc2f32d1db76ed1e51ae8840b03d99e20cb009aa Mon Sep 17 00:00:00 2001
From: matusdrobuliak66
Date: Wed, 21 May 2025 17:25:29 +0200
Subject: [PATCH 5/9] revision fix

---
 .../versions/4e7d8719855b_add_index_to_projects_metadata.py | 4 ++--
 .../director/src/simcore_service_director/registry_proxy.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py
index cadbdb5e240..b4961eb7306 100644
--- a/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/4e7d8719855b_add_index_to_projects_metadata.py
@@ -1,7 +1,7 @@
 """add index to projects_metadata

 Revision ID: 4e7d8719855b
-Revises: b39f2dc87ccd
+Revises: ba9c4816a31b
 Create Date: 2025-05-21 11:48:34.062860+00:00

 """
@@ -10,7 +10,7 @@

 # revision identifiers, used by Alembic.
 revision = "4e7d8719855b"
-down_revision = "b39f2dc87ccd"
+down_revision = "ba9c4816a31b"
 branch_labels = None
 depends_on = None

diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py
index 56b5d812f8c..e577be686ec 100644
--- a/services/director/src/simcore_service_director/registry_proxy.py
+++ b/services/director/src/simcore_service_director/registry_proxy.py
@@ -226,7 +226,7 @@ async def _setup_registry(app: FastAPI) -> None:
         reraise=True,
     )
     async def _wait_until_registry_responsive(app: FastAPI) -> None:
-        await _basic_auth_registry_request(app, path="", method="HEAD", timeout=1.0)
+        await _basic_auth_registry_request(app, path="", method="HEAD", timeout=10.0)

     with log_context(_logger, logging.INFO, msg="Connecting to docker registry"):
         await _wait_until_registry_responsive(app)

From fdcd70392547f5138aa8e7b38882f150eebf0834 Mon Sep 17 00:00:00 2001
From: matusdrobuliak66
Date: Wed, 21 May 2025 18:32:37 +0200
Subject: [PATCH 6/9] fix test

---
 .../server/tests/unit/with_dbs/01/test_director_v2_handlers.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py b/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py
index 363c36cf995..4556878b0e0 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py
@@ -181,9 +181,11 @@ def mock_rpc_list_computations_latest_iteration_tasks_page(
     mocker: MockerFixture,
     user_project: ProjectDict,
 ) -> ComputationTaskRpcGetPage:
+    project_uuid = user_project["uuid"]
     workbench_ids = list(user_project["workbench"].keys())
     example = ComputationTaskRpcGet.model_config["json_schema_extra"]["examples"][0]
     example["node_id"] = workbench_ids[0]
+    example["project_uuid"] = project_uuid

     return mocker.patch(
         "simcore_service_webserver.director_v2._computations_service.computations.list_computations_latest_iteration_tasks_page",

From a71e934e31a5b2d215c95c429f5d5c8ae4363419 Mon Sep 17 00:00:00 2001
From: matusdrobuliak66
Date: Thu, 22 May 2025 08:17:47 +0200
Subject: [PATCH 7/9] review @sanderegg

---
 .../src/models_library/projects_metadata.py | 17 ------------
 .../projects/_metadata_repository.py | 26 -------------------
 2 files changed, 43 deletions(-)
 delete mode 100644 packages/models-library/src/models_library/projects_metadata.py

diff --git a/packages/models-library/src/models_library/projects_metadata.py b/packages/models-library/src/models_library/projects_metadata.py
deleted file mode 100644
index 0290ff03d0d..00000000000
--- a/packages/models-library/src/models_library/projects_metadata.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from datetime import datetime
-from typing import Any
-
-from pydantic import BaseModel
-
-from .projects import ProjectID
-
-
-class ProjectsMetadataDBGet(BaseModel):
-    project_uuid: ProjectID
-    custom: dict[str, Any]
-    created: datetime
-    modified: datetime
-    parent_project_uuid: ProjectID
-    parent_node_id: ProjectID
-    root_parent_project_uuid: ProjectID
-    root_parent_node_id: ProjectID
diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py
index 97c0f95a493..622953ddcd2 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py
@@ -7,7 +7,6 @@
 from aiopg.sa.engine import Engine
 from models_library.api_schemas_webserver.projects_metadata import MetadataDict
 from models_library.projects import ProjectID
-from models_library.projects_metadata import ProjectsMetadataDBGet
 from models_library.projects_nodes_io import NodeID
 from pydantic import TypeAdapter
 from simcore_postgres_database import utils_projects_metadata
@@ -24,7 +23,6 @@
     ProjectNodesRepo,
 )
 from simcore_postgres_database.utils_repos import (
-    get_columns_from_db_model,
     pass_or_acquire_connection,
 )
 from sqlalchemy.ext.asyncio import AsyncConnection
@@ -41,12 +39,6 @@
 F = TypeVar("F", bound=Callable[..., Awaitable[Any]])


-PROJECT_METADATA_DB_COLS = get_columns_from_db_model(
-    projects_metadata,
-    ProjectsMetadataDBGet,
-)
-
-
 def _handle_projects_metadata_exceptions(fct: F) -> F:
     """Transforms project errors -> http errors"""

@@ -163,24 +155,6 @@ async def set_project_ancestors(
     )


-async def get(
-    app: web.Application,
-    connection: AsyncConnection | None = None,
-    *,
-    project_uuid: ProjectID,
-) -> ProjectsMetadataDBGet:
-    query = sa.select(*PROJECT_METADATA_DB_COLS).where(
-        projects_metadata.c.project_uuid == f"{project_uuid}"
-    )
-
-    async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
-        result = await conn.execute(query)
-        row = result.one_or_none()
-        if row is None:
-            raise ProjectNotFoundError(project_uuid=project_uuid)
-        return ProjectsMetadataDBGet.model_validate(row)
-
-
 async def get_project_uuids_by_root_parent_project_id(
     app: web.Application,
     connection: AsyncConnection | None = None,

From ffee4d5c342728eb3024d8801934df098273a35d Mon Sep 17 00:00:00 2001
From: matusdrobuliak66
Date: Thu, 22 May 2025 08:18:31 +0200
Subject: [PATCH 8/9] revert

---
 .../director/src/simcore_service_director/registry_proxy.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py
index e577be686ec..56b5d812f8c 100644
--- a/services/director/src/simcore_service_director/registry_proxy.py
+++ b/services/director/src/simcore_service_director/registry_proxy.py
@@ -226,7 +226,7 @@ async def _setup_registry(app: FastAPI) -> None:
         reraise=True,
     )
     async def _wait_until_registry_responsive(app: FastAPI) -> None:
-        await _basic_auth_registry_request(app, path="", method="HEAD", timeout=10.0)
+        await _basic_auth_registry_request(app, path="", method="HEAD", timeout=1.0)

     with log_context(_logger, logging.INFO, msg="Connecting to docker registry"):
         await _wait_until_registry_responsive(app)

From 5153f83d808f91f384f675fc1a5b003245b5cb4d Mon Sep 17 00:00:00 2001
From: matusdrobuliak66
Date: Thu, 22 May 2025 09:06:04 +0200
Subject: [PATCH 9/9] review @pcrespov

---
 .../services/modules/db/credit_transactions_db.py | 2 +-
 .../projects/_metadata_repository.py | 7 ++-----
 .../projects/_metadata_service.py | 2 +-
 3 files changed, 4 insertions(+), 7 deletions(-)

diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py
index abbdf976f80..b9b5f3569c5 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py
@@ -227,7 +227,7 @@ async def get_transaction_current_credits_by_service_run_id(
             resource_tracker_credit_transactions.c.service_run_id == f"{service_run_id}"
         )
         result = await conn.execute(select_stmt)
-        row = result.first()
+        row = result.one_or_none()
         if row is None:
             raise CreditTransactionNotFoundError(service_run_id=service_run_id)
         return Decimal(row[0])
diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py
index 622953ddcd2..d943e72e6f9 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py
@@ -155,7 +155,7 @@ async def set_project_ancestors(
     )


-async def get_project_uuids_by_root_parent_project_id(
+async def list_project_uuids_by_root_parent_project_id(
     app: web.Application,
     connection: AsyncConnection | None = None,
     *,
@@ -167,7 +167,4 @@ async def get_project_uuids_by_root_parent_project_id(

     async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
         result = await conn.stream(stmt)
-        output: list[ProjectID] = [
-            ProjectID(row["project_uuid"]) async for row in result
-        ]
-        return output
+        return [ProjectID(row["project_uuid"]) async for row in result]
diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py
index 5d890a6c374..3d77fa6a74f 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py
@@ -121,6 +121,6 @@ async def set_project_ancestors(
 async def get_project_uuids_by_root_parent_project_id(
     app: web.Application, root_parent_project_uuid: ProjectID
 ) -> list[ProjectID]:
-    return await _metadata_repository.get_project_uuids_by_root_parent_project_id(
+    return await _metadata_repository.list_project_uuids_by_root_parent_project_id(
         app=app, root_parent_project_uuid=root_parent_project_uuid
     )