
Commit 561a633

ensure test runs with whole metadata

1 parent 3481ec3

2 files changed: +65 -36 lines

services/director-v2/tests/unit/with_dbs/conftest.py

Lines changed: 54 additions & 23 deletions
@@ -8,15 +8,16 @@
 import datetime
 import json
 from collections.abc import Awaitable, Callable, Iterator
-from typing import Any
+from typing import Any, cast
 from uuid import uuid4
 
 import pytest
 import sqlalchemy as sa
 from _helpers import PublishedProject, RunningProject
 from faker import Faker
+from fastapi.encoders import jsonable_encoder
 from models_library.clusters import Cluster
-from models_library.projects import ProjectAtDB
+from models_library.projects import ProjectAtDB, ProjectID
 from models_library.projects_nodes_io import NodeID
 from pydantic.main import BaseModel
 from simcore_postgres_database.models.cluster_to_groups import cluster_to_groups
@@ -25,7 +26,11 @@
 from simcore_postgres_database.models.comp_runs import comp_runs
 from simcore_postgres_database.models.comp_tasks import comp_tasks
 from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB
-from simcore_service_director_v2.models.comp_runs import CompRunsAtDB, RunMetadataDict
+from simcore_service_director_v2.models.comp_runs import (
+    CompRunsAtDB,
+    ProjectMetadataDict,
+    RunMetadataDict,
+)
 from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB, Image
 from simcore_service_director_v2.utils.computations import to_node_class
 from simcore_service_director_v2.utils.dask import generate_dask_job_id
@@ -84,28 +89,36 @@ def creator(
                 "project_id": f"{project.uuid}",
                 "node_id": f"{node_id}",
                 "schema": {"inputs": {}, "outputs": {}},
-                "inputs": {
-                    key: json.loads(value.json(by_alias=True, exclude_unset=True))
-                    if isinstance(value, BaseModel)
-                    else value
-                    for key, value in node_data.inputs.items()
-                }
-                if node_data.inputs
-                else {},
-                "outputs": {
-                    key: json.loads(value.json(by_alias=True, exclude_unset=True))
-                    if isinstance(value, BaseModel)
-                    else value
-                    for key, value in node_data.outputs.items()
-                }
-                if node_data.outputs
-                else {},
+                "inputs": (
+                    {
+                        key: (
+                            json.loads(value.json(by_alias=True, exclude_unset=True))
+                            if isinstance(value, BaseModel)
+                            else value
+                        )
+                        for key, value in node_data.inputs.items()
+                    }
+                    if node_data.inputs
+                    else {}
+                ),
+                "outputs": (
+                    {
+                        key: (
+                            json.loads(value.json(by_alias=True, exclude_unset=True))
+                            if isinstance(value, BaseModel)
+                            else value
+                        )
+                        for key, value in node_data.outputs.items()
+                    }
+                    if node_data.outputs
+                    else {}
+                ),
                 "image": Image(name=node_data.key, tag=node_data.version).dict(  # type: ignore
                     by_alias=True, exclude_unset=True
                 ),  # type: ignore
                 "node_class": to_node_class(node_data.key),
                 "internal_id": internal_id + 1,
-                "submit": datetime.datetime.now(tz=datetime.timezone.utc),
+                "submit": datetime.datetime.now(tz=datetime.UTC),
                 "job_id": generate_dask_job_id(
                     service_key=node_data.key,
                     service_version=node_data.version,
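
Note: the comprehension above serializes each pydantic-model port value into a plain, JSON-compatible dict via a json round-trip. A minimal standalone sketch of that pattern (pydantic v1 API, as used in this file; Port is an illustrative stand-in, not a model from the repo):

import json

from pydantic import BaseModel


class Port(BaseModel):
    node_uuid: str
    output: str


port = Port(node_uuid="de305d54-75b4-431b-adb2-eb6b9e546014", output="out_1")
# .json() emits a JSON string; json.loads() turns it back into a plain dict,
# which is what gets stored in the comp_tasks "inputs"/"outputs" columns.
assert json.loads(port.json(by_alias=True, exclude_unset=True)) == {
    "node_uuid": "de305d54-75b4-431b-adb2-eb6b9e546014",
    "output": "out_1",
}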
@@ -135,9 +148,26 @@ def creator(
     )
 
 
+@pytest.fixture
+def project_metadata(faker: Faker) -> ProjectMetadataDict:
+    return ProjectMetadataDict(
+        parent_node_id=cast(NodeID, faker.uuid4(cast_to=None)),
+        parent_node_name=faker.pystr(),
+        parent_project_id=cast(ProjectID, faker.uuid4(cast_to=None)),
+        parent_project_name=faker.pystr(),
+        root_parent_project_id=cast(ProjectID, faker.uuid4(cast_to=None)),
+        root_parent_project_name=faker.pystr(),
+        root_parent_node_id=cast(NodeID, faker.uuid4(cast_to=None)),
+        root_parent_node_name=faker.pystr(),
+    )
+
+
 @pytest.fixture
 def run_metadata(
-    osparc_product_name: str, simcore_user_agent: str, faker: Faker
+    osparc_product_name: str,
+    simcore_user_agent: str,
+    project_metadata: ProjectMetadataDict,
+    faker: Faker,
 ) -> RunMetadataDict:
     return RunMetadataDict(
         node_id_names_map={},
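
Note on the cast(..., faker.uuid4(cast_to=None)) idiom in the new fixture: with cast_to=None, Faker returns a real uuid.UUID instead of its default string, and typing.cast only relabels the value for the type checker (NodeID and ProjectID are UUID-based types); nothing is converted at runtime. A minimal sketch:

import uuid

from faker import Faker

fake = Faker()
value = fake.uuid4(cast_to=None)  # uuid.UUID, not the default str
assert isinstance(value, uuid.UUID)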
@@ -147,6 +177,7 @@ def run_metadata(
         user_email=faker.email(),
         wallet_id=faker.pyint(min_value=1),
         wallet_name=faker.name(),
+        project_metadata=project_metadata,
     )
 
 
@@ -171,7 +202,7 @@ def creator(
        with postgres_db.connect() as conn:
            result = conn.execute(
                comp_runs.insert()
-                .values(**run_config)
+                .values(**jsonable_encoder(run_config))
                .returning(sa.literal_column("*"))
            )
            new_run = CompRunsAtDB.from_orm(result.first())
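
Note: switching to jsonable_encoder(run_config) is what lets the insert succeed once run_config carries non-JSON-native values such as the uuid.UUID and datetime objects introduced by the metadata above; the encoder recursively converts them to JSON-safe primitives. A minimal sketch (illustrative keys, not the actual run_config schema):

import datetime
import uuid

from fastapi.encoders import jsonable_encoder

run_config = {
    "project_uuid": uuid.uuid4(),
    "created": datetime.datetime.now(tz=datetime.UTC),
}
# UUIDs become strings and datetimes become ISO-8601 strings, recursively.
encoded = jsonable_encoder(run_config)
assert isinstance(encoded["project_uuid"], str)
assert isinstance(encoded["created"], str)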
@@ -298,7 +329,7 @@ async def running_project(
            project=created_project,
            state=StateType.RUNNING,
            progress=0.0,
-            start=datetime.datetime.now(tz=datetime.timezone.utc),
+            start=datetime.datetime.now(tz=datetime.UTC),
        ),
        runs=runs(user=user, project=created_project, result=StateType.RUNNING),
    )
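
Note: the datetime.timezone.utc -> datetime.UTC replacements in this file are behavior-preserving; datetime.UTC was added in Python 3.11 as an alias of datetime.timezone.utc:

import datetime

# The two spellings name the same singleton (Python 3.11+).
assert datetime.UTC is datetime.timezone.utc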

services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py

Lines changed: 11 additions & 13 deletions
@@ -381,22 +381,17 @@ async def test_misconfigured_pipeline_is_not_scheduled(
     )
     run_entry = CompRunsAtDB.parse_obj(await result.first())
     assert run_entry.result == RunningState.ABORTED
+    assert run_entry.metadata == run_metadata
 
 
 async def _assert_start_pipeline(
-    aiopg_engine, published_project: PublishedProject, scheduler: BaseCompScheduler
+    aiopg_engine,
+    published_project: PublishedProject,
+    scheduler: BaseCompScheduler,
+    run_metadata: RunMetadataDict,
 ) -> list[CompTaskAtDB]:
     exp_published_tasks = deepcopy(published_project.tasks)
     assert published_project.project.prj_owner
-    run_metadata = RunMetadataDict(
-        node_id_names_map={},
-        project_name="",
-        product_name="",
-        simcore_user_agent="",
-        user_email="",
-        wallet_id=231,
-        wallet_name="",
-    )
     await scheduler.run_new_pipeline(
         user_id=published_project.project.prj_owner,
         project_id=published_project.project.uuid,
@@ -618,11 +613,12 @@ async def test_proper_pipeline_is_scheduled(  # noqa: PLR0915
     mocked_clean_task_output_and_log_files_if_invalid: None,
     instrumentation_rabbit_client_parser: mock.AsyncMock,
     resource_tracking_rabbit_client_parser: mock.AsyncMock,
+    run_metadata: RunMetadataDict,
 ):
     _mock_send_computation_tasks(published_project.tasks, mocked_dask_client)
 
     expected_published_tasks = await _assert_start_pipeline(
-        aiopg_engine, published_project, scheduler
+        aiopg_engine, published_project, scheduler, run_metadata
     )
 
     # -------------------------------------------------------------------------------
@@ -990,10 +986,11 @@ async def test_task_progress_triggers(
     published_project: PublishedProject,
     mocked_parse_output_data_fct: None,
     mocked_clean_task_output_and_log_files_if_invalid: None,
+    run_metadata: RunMetadataDict,
 ):
     _mock_send_computation_tasks(published_project.tasks, mocked_dask_client)
     expected_published_tasks = await _assert_start_pipeline(
-        aiopg_engine, published_project, scheduler
+        aiopg_engine, published_project, scheduler, run_metadata
     )
     # -------------------------------------------------------------------------------
     # 1. first run will move comp_tasks to PENDING so the worker can take them
@@ -1286,10 +1283,11 @@ async def test_running_pipeline_triggers_heartbeat(
     aiopg_engine: aiopg.sa.engine.Engine,
     published_project: PublishedProject,
     resource_tracking_rabbit_client_parser: mock.AsyncMock,
+    run_metadata: RunMetadataDict,
 ):
     _mock_send_computation_tasks(published_project.tasks, mocked_dask_client)
     expected_published_tasks = await _assert_start_pipeline(
-        aiopg_engine, published_project, scheduler
+        aiopg_engine, published_project, scheduler, run_metadata
    )
    # -------------------------------------------------------------------------------
    # 1. first run will move comp_tasks to PENDING so the worker can take them
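
Note: the test changes above rely on pytest's fixture injection: declaring run_metadata: RunMetadataDict in a test signature makes pytest resolve the run_metadata fixture from conftest.py by name, so every test now exercises the full metadata, project_metadata included. A self-contained sketch of the mechanism (hypothetical fixture contents, not the real RunMetadataDict):

import pytest


@pytest.fixture
def run_metadata() -> dict:
    # Stand-in for the conftest.py fixture; the real one builds a RunMetadataDict.
    return {"product_name": "osparc", "simcore_user_agent": "pytest"}


def test_pipeline_uses_metadata(run_metadata: dict):
    # pytest matched the parameter name to the fixture above and injected it.
    assert run_metadata["product_name"] == "osparc"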
