# pylint: disable=no-value-for-parameter
# pylint: disable=protected-access
# pylint: disable=redefined-outer-name
# pylint: disable=too-many-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
# pylint: disable=too-many-positional-arguments

from collections.abc import Awaitable, Callable
from typing import Any

from models_library.api_schemas_directorv2.comp_runs import ComputationRunRpcGetPage
from models_library.projects import ProjectAtDB
from servicelib.rabbitmq import RabbitMQRPCClient
from servicelib.rabbitmq.rpc_interfaces.director_v2 import (
    computations as rpc_computations,
)
from simcore_postgres_database.models.comp_pipeline import StateType
from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB
from simcore_service_director_v2.models.comp_runs import CompRunsAtDB
from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB

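# pytest-simcore reads these module-level lists to decide which docker services
# to spin up for this test module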
pytest_simcore_core_services_selection = ["postgres", "rabbit", "redis"]
pytest_simcore_ops_services_selection = [
    "adminer",
]


# @pytest.fixture()
# def minimal_configuration(
#     mock_env: EnvVarsDict,
#     postgres_host_config: dict[str, str],
#     rabbit_service: RabbitSettings,
#     redis_service: RedisSettings,
#     monkeypatch: pytest.MonkeyPatch,
#     faker: Faker,
#     with_disabled_auto_scheduling: mock.Mock,
#     with_disabled_scheduler_publisher: mock.Mock,
# ):
#     monkeypatch.setenv("DIRECTOR_V2_DYNAMIC_SIDECAR_ENABLED", "false")
#     monkeypatch.setenv("COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED", "1")
#     monkeypatch.setenv("COMPUTATIONAL_BACKEND_ENABLED", "1")
#     monkeypatch.setenv("R_CLONE_PROVIDER", "MINIO")
#     monkeypatch.setenv("S3_ENDPOINT", faker.url())
#     monkeypatch.setenv("S3_ACCESS_KEY", faker.pystr())
#     monkeypatch.setenv("S3_REGION", faker.pystr())
#     monkeypatch.setenv("S3_SECRET_KEY", faker.pystr())
#     monkeypatch.setenv("S3_BUCKET_NAME", faker.pystr())


async def test_get_computation_from_published_computation_task(
    # minimal_configuration: None,
    fake_workbench_without_outputs: dict[str, Any],
    fake_workbench_adjacency: dict[str, Any],
    registered_user: Callable[..., dict[str, Any]],
    project: Callable[..., Awaitable[ProjectAtDB]],
    create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]],
    create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]],
    create_comp_run: Callable[..., Awaitable[CompRunsAtDB]],
    # async_client: httpx.AsyncClient,
    rpc_client: RabbitMQRPCClient,
):
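    """Seeds a published project (pipeline, tasks and a comp run) directly in the
    DB, then lists the latest-iteration computations over RPC and expects exactly
    that one run back."""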
    user = registered_user()
    proj = await project(user, workbench=fake_workbench_without_outputs)
    await create_pipeline(
        project_id=f"{proj.uuid}",
        dag_adjacency_list=fake_workbench_adjacency,
    )
    comp_tasks = await create_tasks(
        user=user, project=proj, state=StateType.PUBLISHED, progress=0
    )
    comp_runs = await create_comp_run(
        user=user, project=proj, result=StateType.PUBLISHED
    )
    assert comp_runs

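    # list this user's computations over RabbitMQ RPC: the run seeded above
    # should be the only entry in the latest-iteration page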
    output = await rpc_computations.list_computations_latest_iteration_page(
        rpc_client, product_name="osparc", user_id=user["id"]
    )
    assert isinstance(output, ComputationRunRpcGetPage)
    assert output.total == 1
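    # A possible follow-up check, sketched only: the single page item should match
    # the seeded run. The field name below is an assumption about the item schema
    # (ComputationRunRpcGet) and is not verified here.
    # assert output.items[0].project_uuid == proj.uuid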

    # get_computation_url = httpx.URL(
    #     f"/v2/computations/{proj.uuid}?user_id={user['id']}"
    # )
    # response = await async_client.get(get_computation_url)
    # assert response.status_code == status.HTTP_200_OK, response.text
    # returned_computation = ComputationGet.model_validate(response.json())
    # assert returned_computation
    # expected_stop_url = async_client.base_url.join(
    #     f"/v2/computations/{proj.uuid}:stop?user_id={user['id']}"
    # )