16 | 16 | import sqlalchemy as sa
17 | 17 | from aiohttp.test_utils import TestClient
18 | 18 | from common_library.json_serialization import json_dumps
| 19 | +from faker import Faker
19 | 20 | from models_library.projects_state import RunningState
20 | 21 | from pytest_simcore.helpers.assert_checks import assert_status
21 | 22 | from servicelib.aiohttp import status
22 | 23 | from servicelib.aiohttp.application import create_safe_application
23 | 24 | from servicelib.status_codes_utils import get_code_display_name
24 | 25 | from settings_library.rabbit import RabbitSettings
25 | 26 | from settings_library.redis import RedisSettings
| 27 | +from simcore_postgres_database.models.comp_runs_collections import comp_runs_collections
26 | 28 | from simcore_postgres_database.models.projects import projects
| 29 | +from simcore_postgres_database.models.projects_metadata import projects_metadata
27 | 30 | from simcore_postgres_database.models.users import UserRole
28 | 31 | from simcore_postgres_database.webserver_models import (
29 | 32 |     NodeClass,
@@ -518,3 +521,70 @@ async def test_run_pipeline_and_check_state(
518 | 521 | ) |
519 | 522 |
520 | 523 | print(f"<-- pipeline completed successfully in {time.monotonic() - start} seconds") |
| 524 | + |
| 525 | + |
| 526 | +@pytest.fixture |
| 527 | +async def populated_project_metadata( |
| 528 | + client: TestClient, |
| 529 | + logged_user: dict[str, Any], |
| 530 | + user_project: dict[str, Any], |
| 531 | + faker: Faker, |
| 532 | + postgres_db: sa.engine.Engine, |
| 533 | +): |
| 534 | + assert client.app |
| 535 | + project_uuid = user_project["uuid"] |
| 536 | + with postgres_db.connect() as con: |
| 537 | + con.execute( |
| 538 | + projects_metadata.insert().values( |
| 539 | + **{ |
| 540 | + "project_uuid": project_uuid, |
| 541 | + "custom": { |
| 542 | + "job_name": "My Job Name", |
| 543 | + "group_id": faker.uuid4(), |
| 544 | + "group_name": "My Group Name", |
| 545 | + }, |
| 546 | + } |
| 547 | + ) |
| 548 | + ) |
| 549 | + yield |
| 550 | + con.execute(projects_metadata.delete()) |
| 551 | + con.execute(comp_runs_collections.delete()) # cleanup |
| 552 | + |
| 553 | + |
| 554 | +@pytest.mark.parametrize(*user_role_response(), ids=str) |
| 555 | +async def test_start_multiple_computation_with_the_same_collection_run_id( |
| 556 | + client: TestClient, |
| 557 | + sleeper_service: dict[str, str], |
| 558 | + postgres_db: sa.engine.Engine, |
| 559 | + populated_project_metadata: None, |
| 560 | + logged_user: dict[str, Any], |
| 561 | + user_project: dict[str, Any], |
| 562 | + fake_workbench_adjacency_list: dict[str, Any], |
| 563 | + user_role: UserRole, |
| 564 | + expected: _ExpectedResponseTuple, |
| 565 | +): |
| 566 | + assert client.app |
| 567 | + project_id = user_project["uuid"] |
| 568 | + |
| 569 | + url_start = client.app.router["start_computation"].url_for(project_id=project_id) |
| 570 | + assert url_start == URL(f"/{API_VTAG}/computations/{project_id}:start") |
| 571 | + |
| 572 | + # POST /v0/computations/{project_id}:start |
| 573 | + resp = await client.post(f"{url_start}") |
| 574 | + await assert_status(resp, expected.created) |
| 575 | + |
| 576 | + resp = await client.post(f"{url_start}") |
| 577 | +    # starting the same pipeline again while it is running must be rejected
| 578 | +    assert resp.status == expected.conflict
| 579 | + |
| 580 | +    # NOTE: both POSTs carry the same group_id via the project metadata, so only
| 581 | +    # a single entry must be created in the comp_runs_collections table (checked below)
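| 582 | +
| 583 | +    # A minimal sketch of that check, assuming comp_runs_collections is a plain
| 584 | +    # SQLAlchemy Table; only the row count is asserted, since its exact columns
| 585 | +    # are not shown in this diff:
| 586 | +    with postgres_db.connect() as con:
| 587 | +        num_collections = con.execute(
| 588 | +            sa.select(sa.func.count()).select_from(comp_runs_collections)
| 589 | +        ).scalar()
| 590 | +    assert num_collections == 1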