|
38 | 38 | from ..projects.api import ( |
39 | 39 | batch_get_project_name, |
40 | 40 | check_user_project_permission, |
41 | | - get_project_dict_legacy, |
42 | 41 | ) |
43 | 42 | from ..projects.projects_metadata_service import ( |
44 | 43 | get_project_custom_metadata_or_empty_dict, |
|
48 | 47 | from ._comp_runs_collections_service import get_comp_run_collection_or_none_by_id |
49 | 48 |
|
50 | 49 |
|
| 50 | +async def _wrap_with_id( |
| 51 | + project_id: ProjectID, coro: Awaitable[list[tuple[NodeID, Node]]] |
| 52 | +) -> tuple[ProjectID, dict[NodeID, Node]]: |
| 53 | + nodes = await coro |
| 54 | + return project_id, dict(nodes) |
| 55 | + |
| 56 | + |
51 | 57 | async def _get_projects_metadata( |
52 | 58 | app: web.Application, |
53 | 59 | project_uuids: list[ProjectID], |
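Review note: hoisting `_wrap_with_id` to module level lets both `list_computations_latest_iteration_tasks` and `list_computation_collection_run_tasks` share it. The helper awaits the per-project fetch and tags the result with its `ProjectID`, so the gathered pairs can be folded back into a mapping. A minimal, runnable sketch of the pattern (the `ProjectID`/`NodeID`/`Node` aliases below are illustrative stand-ins, not the real model types):

```python
import asyncio
from collections.abc import Awaitable

# Illustrative stand-ins for the real model types.
ProjectID = str
NodeID = str
Node = dict


async def _wrap_with_id(
    project_id: ProjectID, coro: Awaitable[list[tuple[NodeID, Node]]]
) -> tuple[ProjectID, dict[NodeID, Node]]:
    # Await the per-project fetch and pair it with its project id so that
    # results from a concurrent gather can be rebuilt into a dict.
    nodes = await coro
    return project_id, dict(nodes)


async def _demo() -> None:
    async def fake_get_by_project(project_id: ProjectID) -> list[tuple[NodeID, Node]]:
        return [("node-1", {"label": f"task of {project_id}"})]

    results = await asyncio.gather(
        *[_wrap_with_id(pid, fake_get_by_project(pid)) for pid in ("p1", "p2")]
    )
    project_uuid_to_workbench = dict(results)
    assert project_uuid_to_workbench["p2"]["node-1"]["label"] == "task of p2"


asyncio.run(_demo())
```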
@@ -253,12 +259,6 @@ async def list_computations_latest_iteration_tasks( |
253 | 259 | # Fetch projects metadata concurrently |
254 | 260 | # NOTE: MD: can be improved with a single batch call |
255 | 261 |
|
256 | | - async def _wrap_with_id( |
257 | | - project_id: ProjectID, coro: Awaitable[list[tuple[NodeID, Node]]] |
258 | | - ) -> tuple[ProjectID, dict[NodeID, Node]]: |
259 | | - nodes = await coro |
260 | | - return project_id, dict(nodes) |
261 | | - |
262 | 262 | results = await limited_gather( |
263 | 263 | *[ |
264 | 264 | _wrap_with_id(project_uuid, get_by_project(app, project_id=project_uuid)) |
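Both call sites fan out with `limited_gather(..., limit=20)` to cap concurrent project fetches. For readers outside this codebase: a semaphore-bounded gather gives roughly the same behaviour (a sketch under that assumption, not the servicelib implementation):

```python
import asyncio
from collections.abc import Awaitable
from typing import TypeVar

T = TypeVar("T")


async def bounded_gather(*awaitables: Awaitable[T], limit: int) -> list[T]:
    # Run at most `limit` awaitables concurrently, preserving input order.
    semaphore = asyncio.Semaphore(limit)

    async def _run(awaitable: Awaitable[T]) -> T:
        async with semaphore:
            return await awaitable

    return await asyncio.gather(*(_run(aw) for aw in awaitables))
```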
@@ -299,7 +299,7 @@ async def _wrap_with_id( |
299 | 299 | ended_at=item.ended_at, |
300 | 300 | log_download_link=item.log_download_link, |
301 | 301 | node_name=project_uuid_to_workbench[item.project_uuid][item.node_id].label |
302 | | - or "", |
| 302 | + or "Unknown", |
303 | 303 | osparc_credits=credits_or_none, |
304 | 304 | ) |
305 | 305 | for item, credits_or_none in zip( |
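Minor but behaviour-changing: because `or` falls through on any falsy value, an empty node label now surfaces as "Unknown" instead of an empty string, matching the fallback already used in `list_computation_collection_run_tasks`:

```python
# `or` treats "" and None alike, so both fall through to the default:
assert ("" or "Unknown") == "Unknown"
assert (None or "Unknown") == "Unknown"
```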
@@ -421,15 +421,16 @@ async def list_computation_collection_run_tasks( |
421 | 421 | # Get unique set of all project_uuids from comp_tasks |
422 | 422 | unique_project_uuids = {task.project_uuid for task in _tasks_get.items} |
423 | 423 | # NOTE: MD: can be improved with a single batch call |
424 | | - project_dicts = await limited_gather( |
| 424 | + results = await limited_gather( |
425 | 425 | *[ |
426 | | - get_project_dict_legacy(app, project_uuid=project_uuid) |
| 426 | + _wrap_with_id(project_uuid, get_by_project(app, project_id=project_uuid)) |
427 | 427 | for project_uuid in unique_project_uuids |
428 | 428 | ], |
429 | 429 | limit=20, |
430 | 430 | ) |
| 431 | + |
431 | 432 | # Build a dict: project_uuid -> workbench |
432 | | - project_uuid_to_workbench = {prj["uuid"]: prj["workbench"] for prj in project_dicts} |
| 433 | + project_uuid_to_workbench: dict[ProjectID, dict[NodeID, Node]] = dict(results) |
433 | 434 |
|
434 | 435 | # Fetch projects metadata concurrently |
435 | 436 | _projects_metadata = await _get_projects_metadata( |
@@ -466,9 +467,7 @@ async def list_computation_collection_run_tasks( |
466 | 467 | log_download_link=item.log_download_link, |
467 | 468 | name=( |
468 | 469 | custom_metadata.get("job_name") |
469 | | - or project_uuid_to_workbench[f"{item.project_uuid}"][ |
470 | | - f"{item.node_id}" |
471 | | - ].get("label") |
| 470 | + or project_uuid_to_workbench[item.project_uuid][item.node_id].label |
472 | 471 | or "Unknown" |
473 | 472 | ), |
474 | 473 | osparc_credits=credits_or_none, |
|
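Replacing `get_project_dict_legacy` with `get_by_project` swaps string-keyed raw dicts for typed keys and model attribute access, and `dict(results)` rebuilds the `ProjectID -> workbench` mapping from the gathered pairs. A before/after sketch of the lookup (the `Node` dataclass is an illustrative stand-in for the real model):

```python
from dataclasses import dataclass


@dataclass
class Node:
    # Illustrative subset of the real Node model.
    label: str | None = None


# Before: raw project dicts keyed by stringified UUIDs.
legacy = {"some-uuid": {"some-node": {"label": "solver"}}}
name_before = legacy["some-uuid"]["some-node"].get("label") or "Unknown"

# After: a typed mapping with attribute access on the node model.
typed: dict[str, dict[str, Node]] = {"some-uuid": {"some-node": Node(label="solver")}}
name_after = typed["some-uuid"]["some-node"].label or "Unknown"

assert name_before == name_after == "solver"
```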