Skip to content

Commit caa2799

Browse files
committed
rename
1 parent 5f6f274 commit caa2799

File tree

4 files changed

+55
-45
lines changed

4 files changed

+55
-45
lines changed

packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
from faker import Faker
1515
from models_library.products import ProductName
1616
from models_library.projects import ProjectAtDB, ProjectID
17+
from models_library.projects_nodes import Node
1718
from models_library.projects_nodes_io import NodeID
1819
from pytest_simcore.helpers.logging_tools import log_context
1920
from simcore_postgres_database.models.comp_pipeline import StateType, comp_pipeline
@@ -134,23 +135,27 @@ async def _(
134135
project_nodes_repo = ProjectNodesRepo(project_uuid=project_uuid)
135136

136137
for node_id, node_data in project_workbench.items():
138+
# NOTE: workbench nodes have a lot of camelCase fields. We validate with Node and
139+
# export to ProjectNodeCreate with alias=False
140+
node_model = Node.model_validate(node_data)
141+
137142
# NOTE: currently no resources is passed until it becomes necessary
138-
node_values = {
143+
project_workbench_node = {
139144
"required_resources": {},
140145
"key": random_service_key(fake=faker),
141146
"version": random_service_version(fake=faker),
142147
"label": faker.pystr(),
143-
**node_data,
148+
**node_model.model_dump(mode="json", by_alias=False),
144149
}
145150

146151
if project_nodes_overrides:
147-
node_values.update(project_nodes_overrides)
152+
project_workbench_node.update(project_nodes_overrides)
148153

149154
await project_nodes_repo.add(
150155
con,
151156
nodes=[
152157
ProjectNodeCreate(
153-
node_id=NodeID(node_id), **node_values
158+
node_id=NodeID(node_id), **project_workbench_node
154159
)
155160
],
156161
)

services/director-v2/tests/helpers/shared_comp_utils.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -22,14 +22,14 @@ async def assert_computation_task_out_obj(
2222
task_out: ComputationGet,
2323
*,
2424
project_uuid: ProjectID,
25-
exp_task_state: RunningState,
26-
exp_pipeline_details: PipelineDetails,
25+
expected_task_state: RunningState,
26+
expected_pipeline_details: PipelineDetails,
2727
iteration: PositiveInt | None,
2828
) -> None:
2929
assert task_out.id == project_uuid
30-
assert task_out.state == exp_task_state
30+
assert task_out.state == expected_task_state
3131
assert task_out.url.path == f"/v2/computations/{project_uuid}"
32-
if exp_task_state in [
32+
if expected_task_state in [
3333
RunningState.PUBLISHED,
3434
RunningState.PENDING,
3535
RunningState.STARTED,
@@ -41,7 +41,7 @@ async def assert_computation_task_out_obj(
4141
assert task_out.iteration == iteration
4242
# check pipeline details contents
4343
received_task_out_pipeline = task_out.pipeline_details.model_dump()
44-
expected_task_out_pipeline = exp_pipeline_details.model_dump()
44+
expected_task_out_pipeline = expected_pipeline_details.model_dump()
4545
assert received_task_out_pipeline == expected_task_out_pipeline
4646

4747

services/director-v2/tests/integration/01/test_computation_api.py

Lines changed: 39 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -415,26 +415,27 @@ async def test_run_partial_computation(
415415
)
416416

417417
def _convert_to_pipeline_details(
418-
project: ProjectAtDB,
418+
workbench_node_uuids: list[str],
419419
expected_pipeline_adj_list: dict[int, list[int]],
420420
expected_node_states: dict[int, dict[str, Any]],
421421
) -> PipelineDetails:
422-
workbench_node_uuids = list(project.workbench.keys())
422+
423423
converted_adj_list: dict[NodeID, list[NodeID]] = {}
424424
for node_key, next_nodes in expected_pipeline_adj_list.items():
425425
converted_adj_list[NodeID(workbench_node_uuids[node_key])] = [
426426
NodeID(workbench_node_uuids[n]) for n in next_nodes
427427
]
428428
converted_node_states: dict[NodeID, NodeState] = {
429-
NodeID(workbench_node_uuids[n]): NodeState(
430-
modified=s["modified"],
429+
NodeID(workbench_node_uuids[node_index]): NodeState(
430+
modified=node_state["modified"],
431431
dependencies={
432-
NodeID(workbench_node_uuids[dep_n]) for dep_n in s["dependencies"]
432+
NodeID(workbench_node_uuids[dep_n])
433+
for dep_n in node_state["dependencies"]
433434
},
434-
currentStatus=s.get("currentStatus", RunningState.NOT_STARTED),
435-
progress=s.get("progress"),
435+
currentStatus=node_state.get("currentStatus", RunningState.NOT_STARTED),
436+
progress=node_state.get("progress"),
436437
)
437-
for n, s in expected_node_states.items()
438+
for node_index, node_state in expected_node_states.items()
438439
}
439440
pipeline_progress = 0
440441
for node_id in converted_adj_list:
@@ -448,7 +449,9 @@ def _convert_to_pipeline_details(
448449

449450
# convert the ids to the node uuids from the project
450451
expected_pipeline_details = _convert_to_pipeline_details(
451-
sleepers_project, params.exp_pipeline_adj_list, params.exp_node_states
452+
workbench_node_uuids=list(sleepers_project.workbench.keys()),
453+
expected_pipeline_adj_list=params.exp_pipeline_adj_list,
454+
expected_node_states=params.exp_node_states,
452455
)
453456

454457
# send a valid project with sleepers
@@ -469,8 +472,8 @@ def _convert_to_pipeline_details(
469472
await assert_computation_task_out_obj(
470473
task_out,
471474
project_uuid=sleepers_project.uuid,
472-
exp_task_state=RunningState.PUBLISHED,
473-
exp_pipeline_details=expected_pipeline_details,
475+
expected_task_state=RunningState.PUBLISHED,
476+
expected_pipeline_details=expected_pipeline_details,
474477
iteration=1,
475478
)
476479

@@ -479,13 +482,15 @@ def _convert_to_pipeline_details(
479482
async_client, task_out.url, user["id"], sleepers_project.uuid
480483
)
481484
expected_pipeline_details_after_run = _convert_to_pipeline_details(
482-
sleepers_project, params.exp_pipeline_adj_list, params.exp_node_states_after_run
485+
workbench_node_uuids=list(sleepers_project.workbench.keys()),
486+
expected_pipeline_adj_list=params.exp_pipeline_adj_list,
487+
expected_node_states=params.exp_node_states_after_run,
483488
)
484489
await assert_computation_task_out_obj(
485490
task_out,
486491
project_uuid=sleepers_project.uuid,
487-
exp_task_state=RunningState.SUCCESS,
488-
exp_pipeline_details=expected_pipeline_details_after_run,
492+
expected_task_state=RunningState.SUCCESS,
493+
expected_pipeline_details=expected_pipeline_details_after_run,
489494
iteration=1,
490495
)
491496

@@ -537,8 +542,8 @@ def _convert_to_pipeline_details(
537542
await assert_computation_task_out_obj(
538543
task_out,
539544
project_uuid=sleepers_project.uuid,
540-
exp_task_state=RunningState.PUBLISHED,
541-
exp_pipeline_details=expected_pipeline_details_forced,
545+
expected_task_state=RunningState.PUBLISHED,
546+
expected_pipeline_details=expected_pipeline_details_forced,
542547
iteration=2,
543548
)
544549

@@ -582,8 +587,8 @@ async def test_run_computation(
582587
await assert_computation_task_out_obj(
583588
task_out,
584589
project_uuid=sleepers_project.uuid,
585-
exp_task_state=RunningState.PUBLISHED,
586-
exp_pipeline_details=fake_workbench_computational_pipeline_details,
590+
expected_task_state=RunningState.PUBLISHED,
591+
expected_pipeline_details=fake_workbench_computational_pipeline_details,
587592
iteration=1,
588593
)
589594

@@ -604,8 +609,8 @@ async def test_run_computation(
604609
await assert_computation_task_out_obj(
605610
task_out,
606611
project_uuid=sleepers_project.uuid,
607-
exp_task_state=RunningState.SUCCESS,
608-
exp_pipeline_details=fake_workbench_computational_pipeline_details_completed,
612+
expected_task_state=RunningState.SUCCESS,
613+
expected_pipeline_details=fake_workbench_computational_pipeline_details_completed,
609614
iteration=1,
610615
)
611616

@@ -652,8 +657,8 @@ async def test_run_computation(
652657
await assert_computation_task_out_obj(
653658
task_out,
654659
project_uuid=sleepers_project.uuid,
655-
exp_task_state=RunningState.PUBLISHED,
656-
exp_pipeline_details=expected_pipeline_details_forced, # NOTE: here the pipeline already ran so its states are different
660+
expected_task_state=RunningState.PUBLISHED,
661+
expected_pipeline_details=expected_pipeline_details_forced, # NOTE: here the pipeline already ran so its states are different
657662
iteration=2,
658663
)
659664

@@ -664,8 +669,8 @@ async def test_run_computation(
664669
await assert_computation_task_out_obj(
665670
task_out,
666671
project_uuid=sleepers_project.uuid,
667-
exp_task_state=RunningState.SUCCESS,
668-
exp_pipeline_details=fake_workbench_computational_pipeline_details_completed,
672+
expected_task_state=RunningState.SUCCESS,
673+
expected_pipeline_details=fake_workbench_computational_pipeline_details_completed,
669674
iteration=2,
670675
)
671676

@@ -705,8 +710,8 @@ async def test_abort_computation(
705710
await assert_computation_task_out_obj(
706711
task_out,
707712
project_uuid=sleepers_project.uuid,
708-
exp_task_state=RunningState.PUBLISHED,
709-
exp_pipeline_details=fake_workbench_computational_pipeline_details,
713+
expected_task_state=RunningState.PUBLISHED,
714+
expected_pipeline_details=fake_workbench_computational_pipeline_details,
710715
iteration=1,
711716
)
712717

@@ -782,8 +787,8 @@ async def test_update_and_delete_computation(
782787
await assert_computation_task_out_obj(
783788
task_out,
784789
project_uuid=sleepers_project.uuid,
785-
exp_task_state=RunningState.NOT_STARTED,
786-
exp_pipeline_details=fake_workbench_computational_pipeline_details_not_started,
790+
expected_task_state=RunningState.NOT_STARTED,
791+
expected_pipeline_details=fake_workbench_computational_pipeline_details_not_started,
787792
iteration=None,
788793
)
789794

@@ -801,8 +806,8 @@ async def test_update_and_delete_computation(
801806
await assert_computation_task_out_obj(
802807
task_out,
803808
project_uuid=sleepers_project.uuid,
804-
exp_task_state=RunningState.NOT_STARTED,
805-
exp_pipeline_details=fake_workbench_computational_pipeline_details_not_started,
809+
expected_task_state=RunningState.NOT_STARTED,
810+
expected_pipeline_details=fake_workbench_computational_pipeline_details_not_started,
806811
iteration=None,
807812
)
808813

@@ -820,8 +825,8 @@ async def test_update_and_delete_computation(
820825
await assert_computation_task_out_obj(
821826
task_out,
822827
project_uuid=sleepers_project.uuid,
823-
exp_task_state=RunningState.NOT_STARTED,
824-
exp_pipeline_details=fake_workbench_computational_pipeline_details_not_started,
828+
expected_task_state=RunningState.NOT_STARTED,
829+
expected_pipeline_details=fake_workbench_computational_pipeline_details_not_started,
825830
iteration=None,
826831
)
827832

@@ -838,8 +843,8 @@ async def test_update_and_delete_computation(
838843
await assert_computation_task_out_obj(
839844
task_out,
840845
project_uuid=sleepers_project.uuid,
841-
exp_task_state=RunningState.PUBLISHED,
842-
exp_pipeline_details=fake_workbench_computational_pipeline_details,
846+
expected_task_state=RunningState.PUBLISHED,
847+
expected_pipeline_details=fake_workbench_computational_pipeline_details,
843848
iteration=1,
844849
)
845850

services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1008,8 +1008,8 @@ async def test_nodeports_integration(
10081008
await assert_computation_task_out_obj(
10091009
task_out,
10101010
project_uuid=current_study.uuid,
1011-
exp_task_state=RunningState.SUCCESS,
1012-
exp_pipeline_details=PipelineDetails.model_validate(fake_dy_success),
1011+
expected_task_state=RunningState.SUCCESS,
1012+
expected_pipeline_details=PipelineDetails.model_validate(fake_dy_success),
10131013
iteration=1,
10141014
)
10151015
update_project_workbench_with_comp_tasks(str(current_study.uuid))

0 commit comments

Comments
 (0)