7 | 7 | import asyncio |
8 | 8 | import json |
9 | 9 | import time |
10 | | -from collections.abc import Callable |
| 10 | +from collections.abc import Awaitable, Callable |
11 | 11 | from copy import deepcopy |
12 | 12 | from pathlib import Path |
13 | 13 | from typing import Any, NamedTuple |
14 | 14 |
15 | 15 | import pytest |
| 16 | +import socketio |
16 | 17 | import sqlalchemy as sa |
17 | 18 | from aiohttp.test_utils import TestClient |
18 | 19 | from common_library.json_serialization import json_dumps |
@@ -536,14 +537,12 @@ async def populated_project_metadata( |
536 | 537 |     with postgres_db.connect() as con:
537 | 538 |         con.execute(
538 | 539 |             projects_metadata.insert().values(
539 | | -                **{
540 | | -                    "project_uuid": project_uuid,
541 | | -                    "custom": {
542 | | -                        "job_name": "My Job Name",
543 | | -                        "group_id": faker.uuid4(),
544 | | -                        "group_name": "My Group Name",
545 | | -                    },
546 | | -                }
| 540 | +                project_uuid=project_uuid,
| 541 | +                custom={
| 542 | +                    "job_name": "My Job Name",
| 543 | +                    "group_id": faker.uuid4(),
| 544 | +                    "group_name": "My Group Name",
| 545 | +                },
547 | 546 |             )
548 | 547 |         )
549 | 548 |     yield
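
For context on the hunk above: SQLAlchemy's Insert.values() accepts column values directly as keyword arguments, so the ** dict unpacking was redundant. A minimal standalone sketch (hypothetical simplified table, not the real projects_metadata schema) showing that both forms build the same statement:

import sqlalchemy as sa

metadata = sa.MetaData()
projects_metadata = sa.Table(
    "projects_metadata",
    metadata,
    sa.Column("project_uuid", sa.String, primary_key=True),
    sa.Column("custom", sa.JSON),
)

# Passing a dict with ** ...
stmt_unpacked = projects_metadata.insert().values(
    **{"project_uuid": "some-uuid", "custom": {"job_name": "My Job Name"}}
)
# ... is equivalent to passing keyword arguments directly (the form used in the PR)
stmt_kwargs = projects_metadata.insert().values(
    project_uuid="some-uuid", custom={"job_name": "My Job Name"}
)
assert str(stmt_unpacked) == str(stmt_kwargs)  # both render the same INSERT
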
@@ -579,3 +578,56 @@ async def test_start_multiple_computation_with_the_same_collection_run_id( |
579 | 578 |
580 | 579 | # NOTE: This tests that there is only one entry in comp_runs_collections table created |
581 | 580 | # as the project metadata has the same group_id |
| 581 | + |
| 582 | + |
| 583 | +@pytest.mark.parametrize(*user_role_response(), ids=str)
| 584 | +async def test_running_computation_sends_progress_updates_via_socketio(
| 585 | +    client: TestClient,
| 586 | +    sleeper_service: dict[str, str],
| 587 | +    postgres_db: sa.engine.Engine,
| 588 | +    logged_user: dict[str, Any],
| 589 | +    user_project: dict[str, Any],
| 590 | +    fake_workbench_adjacency_list: dict[str, Any],
| 591 | +    expected: _ExpectedResponseTuple,
| 592 | +    create_socketio_connection: Callable[
| 593 | +        [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]]
| 594 | +    ],
| 595 | +):
| 596 | +    assert client.app
| 597 | +    socket_io_conn, *_ = await create_socketio_connection(None, client)
| 598 | +
| 599 | +    project_id = user_project["uuid"]
| 600 | +
| 601 | +    url_start = client.app.router["start_computation"].url_for(project_id=project_id)
| 602 | +    assert url_start == URL(f"/{API_VTAG}/computations/{project_id}:start")
| 603 | +
| 604 | +    # POST /v0/computations/{project_id}:start
| 605 | +    resp = await client.post(f"{url_start}")
| 606 | +    data, error = await assert_status(resp, expected.created)
| 607 | +
| 608 | +    if error:
| 609 | +        return
| 610 | +
| 611 | +    assert "pipeline_id" in data
| 612 | +    assert data["pipeline_id"] == project_id
| 613 | +
| 614 | +    _assert_db_contents(
| 615 | +        project_id,
| 616 | +        postgres_db,
| 617 | +        user_project["workbench"],
| 618 | +        fake_workbench_adjacency_list,
| 619 | +        check_outputs=False,
| 620 | +    )
| 621 | +
| 622 | +    # wait for the computation to complete successfully
| 623 | +    await _assert_and_wait_for_pipeline_state(
| 624 | +        client,
| 625 | +        project_id,
| 626 | +        RunningState.SUCCESS,
| 627 | +        _ExpectedResponseTuple(
| 628 | +            ok=status.HTTP_200_OK,
| 629 | +            created=status.HTTP_201_CREATED,
| 630 | +            no_content=status.HTTP_204_NO_CONTENT,
| 631 | +            confict=status.HTTP_409_CONFLICT,
| 632 | +        ),
| 633 | +    )
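
The new test opens a socket.io connection but, within the hunk shown, never registers an event handler or asserts on the progress messages it receives, so the socket.io part of the test name is not yet exercised. Below is a minimal sketch of how received progress events could be collected with python-socketio's handler API; the "nodeProgress" event name and the "progress" payload field are illustrative assumptions, not names confirmed by this diff:

import asyncio

import socketio


async def _collect_progress_events(
    sio_client: socketio.AsyncClient, timeout: float = 60.0
) -> list[dict]:
    # Register a handler on the already-connected client and gather messages
    # until a node reports completion or the timeout expires.
    received: list[dict] = []
    finished = asyncio.Event()

    @sio_client.on("nodeProgress")  # ASSUMPTION: hypothetical event name
    async def _on_progress(message: dict) -> None:
        received.append(message)
        if message.get("progress") == 1:  # ASSUMPTION: progress reported in [0, 1]
            finished.set()

    try:
        await asyncio.wait_for(finished.wait(), timeout=timeout)
    except asyncio.TimeoutError:
        pass  # let the caller assert on whatever arrived before the timeout
    return received

In the test, something like events = await _collect_progress_events(socket_io_conn) could run alongside the pipeline-state wait, followed by assert events, so that the assertion matches what the test name promises.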