|
9 | 9 | from simcore_postgres_database.models.project_to_groups import project_to_groups |
10 | 10 | from simcore_postgres_database.models.projects import projects |
11 | 11 | from simcore_postgres_database.models.projects_metadata import projects_metadata |
12 | | -from simcore_postgres_database.models.projects_nodes import projects_nodes |
13 | 12 | from simcore_postgres_database.models.projects_to_jobs import projects_to_jobs |
14 | 13 | from simcore_postgres_database.models.projects_to_products import projects_to_products |
| 14 | +from simcore_postgres_database.utils_projects_nodes import make_workbench_subquery |
15 | 15 | from simcore_postgres_database.utils_repos import ( |
16 | 16 | get_columns_from_db_model, |
17 | 17 | pass_or_acquire_connection, |
@@ -171,50 +171,7 @@ async def list_projects_marked_as_jobs( |
171 | 171 | total_query = sa.select(sa.func.count()).select_from(base_query) |
172 | 172 |
|
173 | 173 | # Step 6: Create subquery to aggregate project nodes into workbench structure |
174 | | - workbench_subquery = ( |
175 | | - sa.select( |
176 | | - projects_nodes.c.project_uuid, |
177 | | - sa.func.json_object_agg( |
178 | | - projects_nodes.c.node_id, |
179 | | - sa.func.json_build_object( |
180 | | - "key", |
181 | | - projects_nodes.c.key, |
182 | | - "version", |
183 | | - projects_nodes.c.version, |
184 | | - "label", |
185 | | - projects_nodes.c.label, |
186 | | - "progress", |
187 | | - projects_nodes.c.progress, |
188 | | - "thumbnail", |
189 | | - projects_nodes.c.thumbnail, |
190 | | - "inputAccess", |
191 | | - projects_nodes.c.input_access, |
192 | | - "inputNodes", |
193 | | - projects_nodes.c.input_nodes, |
194 | | - "inputs", |
195 | | - projects_nodes.c.inputs, |
196 | | - "inputsRequired", |
197 | | - projects_nodes.c.inputs_required, |
198 | | - "inputsUnits", |
199 | | - projects_nodes.c.inputs_units, |
200 | | - "outputNodes", |
201 | | - projects_nodes.c.output_nodes, |
202 | | - "outputs", |
203 | | - projects_nodes.c.outputs, |
204 | | - "runHash", |
205 | | - projects_nodes.c.run_hash, |
206 | | - "state", |
207 | | - projects_nodes.c.state, |
208 | | - "parent", |
209 | | - projects_nodes.c.parent, |
210 | | - "bootOptions", |
211 | | - projects_nodes.c.boot_options, |
212 | | - ), |
213 | | - ).label("workbench"), |
214 | | - ) |
215 | | - .group_by(projects_nodes.c.project_uuid) |
216 | | - .subquery() |
217 | | - ) |
| 174 | + workbench_subquery = make_workbench_subquery() |
218 | 175 |
|
219 | 176 | # Step 7: Query to get the paginated list with full selection |
220 | 177 | list_query = ( |
@@ -242,7 +199,7 @@ async def list_projects_marked_as_jobs( |
242 | 199 | .offset(pagination_offset) |
243 | 200 | ) |
244 | 201 |
|
245 | | - # Step 8: Execute queries |
| 202 | +# Step 8: Execute queries
246 | 203 | async with pass_or_acquire_connection(self.engine, connection) as conn: |
247 | 204 | total_count = await conn.scalar(total_query) |
248 | 205 | assert isinstance(total_count, int) # nosec |
|
0 commit comments