|
@@ -2,6 +2,7 @@
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
 
+import contextlib
 from collections.abc import AsyncIterator, Awaitable, Callable
 from contextlib import asynccontextmanager
 from typing import Any
@@ -20,6 +21,7 @@
 from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine
 
 from .helpers.faker_factories import DEFAULT_FAKER, random_project
+from .helpers.postgres_tools import insert_and_get_row_lifespan
 from .helpers.postgres_users import insert_and_get_user_and_secrets_lifespan
 
 
@@ -215,28 +217,38 @@ async def _() -> None: |
 @pytest.fixture
 async def create_project_node(
     user_id: UserID, sqlalchemy_async_engine: AsyncEngine, faker: Faker
-) -> Callable[..., Awaitable[tuple[NodeID, dict[str, Any]]]]:
-    async def _creator(
-        project_id: ProjectID, node_id: NodeID | None = None, **kwargs
-    ) -> tuple[NodeID, dict[str, Any]]:
-        async with sqlalchemy_async_engine.begin() as conn:
+) -> AsyncIterator[Callable[..., Awaitable[tuple[NodeID, dict[str, Any]]]]]:
+    created_node_entries: list[tuple[NodeID, ProjectID]] = []
+
+    async with contextlib.AsyncExitStack() as stack:
+
+        async def _creator(
+            project_id: ProjectID, node_id: NodeID | None = None, **kwargs
+        ) -> tuple[NodeID, dict[str, Any]]:
             new_node_id = node_id or NodeID(faker.uuid4())
             node_values = {
+                "node_id": f"{new_node_id}",
+                "project_uuid": f"{project_id}",
                 "key": "simcore/services/frontend/file-picker",
                 "version": "1.0.0",
                 "label": "pytest_fake_node",
+                **kwargs,
             }
-            node_values.update(**kwargs)
-            result = await conn.execute(
-                projects_nodes.insert()
-                .values(
-                    node_id=f"{new_node_id}",
-                    project_uuid=f"{project_id}",
-                    **node_values,
+
+            node_row = await stack.enter_async_context(
+                insert_and_get_row_lifespan(
+                    sqlalchemy_async_engine,
+                    table=projects_nodes,
+                    values=node_values,
+                    pk_col=projects_nodes.c.node_id,
+                    pk_value=f"{new_node_id}",
                 )
-                .returning(sa.literal_column("*"))
             )
-            row = result.one()
-            return new_node_id, row._asdict()
 
-    return _creator
+            created_node_entries.append((new_node_id, project_id))
+            return new_node_id, node_row
+
+        yield _creator
+
+        # Cleanup is handled automatically by insert_and_get_row_lifespan
+        print("Deleting ", created_node_entries)
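
For context, a rough usage sketch of the reworked fixture (the test function and the `project_id` fixture are illustrative assumptions, not part of this change; type names are the ones already used in this conftest). The factory now only assembles `node_values`, while `insert_and_get_row_lifespan` entered on the shared `AsyncExitStack` performs the insert and removes every created row again when the fixture tears down:

    # hypothetical test using the reworked fixture; assumes an async test
    # runner (e.g. pytest-asyncio/anyio) and an existing project row
    async def test_creates_file_picker_node(
        create_project_node,    # fixture changed in this diff
        project_id: ProjectID,  # assumed fixture yielding an existing project's id
    ):
        # extra kwargs end up in node_values and override the defaults
        node_id, node_row = await create_project_node(
            project_id,
            label="my_node",
        )
        assert node_row["key"] == "simcore/services/frontend/file-picker"
        assert node_row["label"] == "my_node"
        # no manual delete needed: the row goes away at fixture teardown

The AsyncExitStack keeps each insert_and_get_row_lifespan context open for the whole test, so cleanup runs once, in reverse order of creation, after the yield.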