@@ -5,6 +5,8 @@
 
 """
 
+from typing import Any, Coroutine
+
 from aiohttp import web
 from models_library.folders import FolderID, FolderQuery, FolderScope
 from models_library.projects import ProjectID
@@ -15,6 +17,9 @@
 from servicelib.utils import logged_gather
 from simcore_postgres_database.models.projects import ProjectType
 from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB
+from simcore_service_webserver.projects._permalink_api import (
+    aggregate_permalink_in_project,
+)
 from simcore_service_webserver.projects._projects_db import (
     batch_get_trashed_by_primary_gid,
 )
@@ -27,21 +32,74 @@
 from .models import ProjectDict, ProjectTypeAPI
 
 
-async def _batch_update_list_of_project_dict(
-    app: web.Application, list_of_project_dict: list[ProjectDict]
-) -> list[ProjectDict]:
+def _batch_update(
+    key: str,
+    value_per_object: list[Any],
+    objects: list[dict[str, Any]],
+):
+    for obj, value in zip(objects, value_per_object, strict=True):
+        obj[key] = value
+    return objects
+
+
+async def _parallel_update(*update_per_object: Coroutine):
+    return await logged_gather(
+        *update_per_object,
+        reraise=True,
+        max_concurrency=100,
+    )
 
-    # updating `trashed_by_primary_gid`
+
+async def _aggregate_data_to_projects_from_other_sources(
+    app: web.Application,
+    *,
+    db_projects: list[ProjectDict],
+    db_project_types: list[ProjectTypeDB],
+    user_id: UserID,
+) -> list[ProjectDict]:
+    """
+    Aggregates data to each project from other sources, first as a batch-update and then as a parallel-update.
+    """
+    # updating `project.trashed_by_primary_gid`
     trashed_by_primary_gid_values = await batch_get_trashed_by_primary_gid(
-        app, projects_uuids=[ProjectID(p["uuid"]) for p in list_of_project_dict]
+        app, projects_uuids=[ProjectID(p["uuid"]) for p in db_projects]
+    )
+
+    _batch_update("trashed_by_primary_gid", trashed_by_primary_gid_values, db_projects)
+
+    # updating `project.state`
+    update_state_per_project = [
+        projects_service.add_project_states_for_user(
+            user_id=user_id,
+            project=prj,
+            is_template=prj_type == ProjectTypeDB.TEMPLATE,
+            app=app,
+        )
+        for prj, prj_type in zip(db_projects, db_project_types, strict=False)
+    ]
+
+    updated_projects: list[ProjectDict] = await _parallel_update(
+        *update_state_per_project,
     )
 
-    for project_dict, value in zip(
-        list_of_project_dict, trashed_by_primary_gid_values, strict=True
-    ):
-        project_dict.update(trashed_by_primary_gid=value)
+    return updated_projects
+
 
-    return list_of_project_dict
+async def aggregate_data_to_projects_from_request(
+    request: web.Request,
+    projects: list[ProjectDict],
+) -> list[ProjectDict]:
+
+    update_permalink_per_project = [
+        # permalink
+        aggregate_permalink_in_project(request, project=prj)
+        for prj in projects
+    ]
+
+    updated_projects: list[ProjectDict] = await _parallel_update(
+        *update_permalink_per_project,
+    )
+    return updated_projects
 
 
 async def list_projects(  # pylint: disable=too-many-arguments
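Note: a minimal standalone sketch of how the two new helpers compose. Here `asyncio.gather` and `_add_state` stand in for `logged_gather` and `projects_service.add_project_states_for_user`; both stand-ins and the demo names are assumptions for illustration, not part of this PR:

```python
import asyncio
from typing import Any, Coroutine


def _batch_update(
    key: str, value_per_object: list[Any], objects: list[dict[str, Any]]
) -> list[dict[str, Any]]:
    # mirrors the PR helper: one value per object, strict length check
    for obj, value in zip(objects, value_per_object, strict=True):
        obj[key] = value
    return objects


async def _parallel_update(*update_per_object: Coroutine) -> list[Any]:
    # stand-in for logged_gather(..., reraise=True, max_concurrency=100)
    return await asyncio.gather(*update_per_object)


async def _add_state(prj: dict[str, Any]) -> dict[str, Any]:
    # stub for projects_service.add_project_states_for_user
    prj["state"] = "CLOSED"
    return prj


async def main() -> None:
    projects = [{"uuid": "p1"}, {"uuid": "p2"}]
    # batch-update: a single round-trip supplies one value per project
    _batch_update("trashed_by_primary_gid", [None, 42], projects)
    # parallel-update: one coroutine per project, gathered concurrently
    await _parallel_update(*(_add_state(p) for p in projects))
    print(projects)


asyncio.run(main())
```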
@@ -122,23 +180,8 @@ async def list_projects(  # pylint: disable=too-many-arguments
         order_by=order_by,
     )
 
-    # AGGREGATE data to the project from other sources, first some sources
-    # as batch-update and then as parallel-update
-    db_projects = await _batch_update_list_of_project_dict(app, db_projects)
-
-    projects: list[ProjectDict] = await logged_gather(
-        *(
-            # state
-            projects_service.add_project_states_for_user(
-                user_id=user_id,
-                project=prj,
-                is_template=prj_type == ProjectTypeDB.TEMPLATE,
-                app=app,
-            )
-            for prj, prj_type in zip(db_projects, db_project_types, strict=False)
-        ),
-        reraise=True,
-        max_concurrency=100,
+    projects = await _aggregate_data_to_projects_from_other_sources(
+        app, db_projects=db_projects, db_project_types=db_project_types, user_id=user_id
     )
 
     return projects, total_number_projects
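For orientation, a hedged sketch of the resulting call pattern at the handler level: the app-scoped phase runs inside `list_projects`, while the request-scoped permalink phase runs where a `web.Request` is available. The handler name, the `user_id` placeholder, and the elided `list_projects` arguments are assumptions, since the full signature sits outside this diff:

```python
from aiohttp import web


async def _list_projects_handler(request: web.Request) -> web.Response:
    user_id = 1  # placeholder; normally resolved from the authenticated session

    # phase 1: app-scoped aggregation (trashed_by_primary_gid, project states)
    # is applied inside list_projects via _aggregate_data_to_projects_from_other_sources
    projects, total_number_projects = await list_projects(
        request.app,
        user_id=user_id,
        # remaining filter/pagination arguments elided
    )

    # phase 2: request-scoped aggregation; permalinks need the request
    projects = await aggregate_data_to_projects_from_request(request, projects)
    return web.json_response({"data": projects, "total": total_number_projects})
```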
@@ -182,23 +225,8 @@ async def list_projects_full_depth(
         order_by=order_by,
     )
 
-    # AGGREGATE data to the project from other sources, first some sources
-    # as BATCH-update and then as PARALLEL-update
-    db_projects = await _batch_update_list_of_project_dict(app, db_projects)
-
-    projects: list[ProjectDict] = await logged_gather(
-        *(
-            # state
-            projects_service.add_project_states_for_user(
-                user_id=user_id,
-                project=prj,
-                is_template=prj_type == ProjectTypeDB.TEMPLATE,
-                app=app,
-            )
-            for prj, prj_type in zip(db_projects, db_project_types, strict=False)
-        ),
-        reraise=True,
-        max_concurrency=100,
+    projects = await _aggregate_data_to_projects_from_other_sources(
+        app, db_projects=db_projects, db_project_types=db_project_types, user_id=user_id
     )
 
     return projects, total_number_projects
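The `strict=True` zip in `_batch_update` turns a length mismatch into an error instead of silent truncation; a test sketch pinning that down (test names are hypothetical and assume `_batch_update` is importable from this module):

```python
import pytest


def test_batch_update_sets_one_value_per_object():
    projects = [{"uuid": "p1"}, {"uuid": "p2"}]
    result = _batch_update("trashed_by_primary_gid", [None, 42], projects)
    assert result is projects  # updates in place and returns the same list
    assert projects[1]["trashed_by_primary_gid"] == 42


def test_batch_update_rejects_length_mismatch():
    # zip(..., strict=True) raises ValueError on mismatched lengths
    with pytest.raises(ValueError):
        _batch_update("trashed_by_primary_gid", [None], [{"uuid": "p1"}, {"uuid": "p2"}])
```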