
Commit 978f407

Author: Andrei Neagu (committed)

refactor types

1 parent 486fb33 · commit 978f407

File tree

6 files changed: +21 -17 lines


services/director-v2/src/simcore_service_director_v2/modules/dask_client.py

Lines changed: 2 additions & 1 deletion
@@ -47,6 +47,7 @@
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID
 from models_library.resource_tracker import HardwareInfo
+from models_library.services import RunID
 from models_library.users import UserID
 from pydantic import TypeAdapter, ValidationError
 from pydantic.networks import AnyUrl
@@ -293,7 +294,7 @@ async def send_computation_tasks(
     remote_fct: ContainerRemoteFct | None = None,
     metadata: RunMetadataDict,
     hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ) -> list[PublishedComputationTask]:
     """actually sends the function remote_fct to be remotely executed. if None is kept then the default
     function that runs container will be started.
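Note: send_computation_tasks now expects a RunID for resource_tracking_run_id instead of a plain str. A minimal, self-contained sketch of the str-to-dedicated-type narrowing this commit applies; the RunId stand-in below is hypothetical and is NOT the actual RunID definition from models_library:

from typing import NewType

# Hypothetical stand-in for models_library's RunID, used only to illustrate
# why narrowing str to a dedicated type helps: callers must wrap the value
# explicitly, and a type checker flags accidental plain strings.
RunId = NewType("RunId", str)

def submit_tasks(resource_tracking_run_id: RunId) -> None:
    # placeholder body; the real method submits tasks to the dask backend
    print(f"submitting with run id {resource_tracking_run_id}")

submit_tasks(RunId("rt-run-1234"))   # OK: explicitly wrapped
# submit_tasks("rt-run-1234")        # reported by a type checker such as mypy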

services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/substitutions.py

Lines changed: 1 addition & 1 deletion
@@ -225,7 +225,7 @@ async def resolve_and_substitute_session_variables_in_specs(
     product_name: str,
     project_id: ProjectID,
     node_id: NodeID,
-    run_id: RunID | str,
+    run_id: RunID,
 ) -> dict[str, Any]:
     table = OsparcSessionVariablesTable.get_from_app_state(app)
     resolver = SpecsSubstitutionsResolver(specs, upgrade=False)

services/director-v2/src/simcore_service_director_v2/utils/dask.py

Lines changed: 2 additions & 1 deletion
@@ -26,6 +26,7 @@
 from models_library.projects import ProjectID, ProjectIDStr
 from models_library.projects_nodes_io import NodeID, NodeIDStr
 from models_library.services import ServiceKey, ServiceVersion
+from models_library.services_types import RunID
 from models_library.users import UserID
 from pydantic import AnyUrl, ByteSize, TypeAdapter, ValidationError
 from servicelib.logging_utils import log_catch, log_context
@@ -342,7 +343,7 @@ async def compute_task_envs(
     node_id: NodeID,
     node_image: Image,
     metadata: RunMetadataDict,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ) -> ContainerEnvsDict:
     product_name = metadata.get("product_name", UNDEFINED_DOCKER_LABEL)
     task_envs = node_image.envs
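With compute_task_envs also taking RunID, a caller that still holds an untyped string (for example a value read from run metadata) has to wrap it before the call. A hedged sketch of such a boundary conversion, assuming RunID is constructible from a plain string in the same way the updated tests construct RunID("some_run_id"); the as_run_id helper is hypothetical and not part of this commit:

from models_library.services_types import RunID  # same import path as utils/dask.py

def as_run_id(raw: str) -> RunID:
    # Assumption: RunID accepts an arbitrary non-empty string; adjust this
    # check if the real type enforces a stricter format.
    if not raw:
        msg = "resource tracking run id must not be empty"
        raise ValueError(msg)
    return RunID(raw)

resource_tracking_run_id = as_run_id("rt-run-1234")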

services/director-v2/tests/unit/test_modules_dask_client.py

Lines changed: 12 additions & 12 deletions
@@ -442,7 +442,7 @@ async def test_send_computation_task(
     task_labels: ContainerLabelsDict,
     empty_hardware_info: HardwareInfo,
     faker: Faker,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     _DASK_EVENT_NAME = faker.pystr()

@@ -561,7 +561,7 @@ async def test_computation_task_is_persisted_on_dask_scheduler(
     mocked_storage_service_api: respx.MockRouter,
     comp_run_metadata: RunMetadataDict,
     empty_hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     """rationale:
     When a task is submitted to the dask backend, a dask future is returned.
@@ -653,7 +653,7 @@ async def test_abort_computation_tasks(
     faker: Faker,
     comp_run_metadata: RunMetadataDict,
     empty_hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     _DASK_EVENT_NAME = faker.pystr()

@@ -744,7 +744,7 @@ async def test_failed_task_returns_exceptions(
     mocked_storage_service_api: respx.MockRouter,
     comp_run_metadata: RunMetadataDict,
     empty_hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     # NOTE: this must be inlined so that the test works,
     # the dask-worker must be able to import the function
@@ -808,7 +808,7 @@ async def test_send_computation_task_with_missing_resources_raises(
     mocked_storage_service_api: respx.MockRouter,
     comp_run_metadata: RunMetadataDict,
     empty_hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     # remove the workers that can handle gpu
     scheduler_info = dask_client.backend.client.scheduler_info()
@@ -854,7 +854,7 @@ async def test_send_computation_task_with_hardware_info_raises(
     mocked_storage_service_api: respx.MockRouter,
     comp_run_metadata: RunMetadataDict,
     hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     # NOTE: running on the default cluster will raise missing resources
     with pytest.raises(MissingComputationalResourcesError):
@@ -884,7 +884,7 @@ async def test_too_many_resources_send_computation_task(
     mocked_storage_service_api: respx.MockRouter,
     comp_run_metadata: RunMetadataDict,
     empty_hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     # create an image that needs a huge amount of CPU
     image = Image(
@@ -925,7 +925,7 @@ async def test_disconnected_backend_raises_exception(
     mocked_storage_service_api: respx.MockRouter,
     comp_run_metadata: RunMetadataDict,
     empty_hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     # DISCONNECT THE CLUSTER
     await dask_spec_local_cluster.close()  # type: ignore
@@ -958,7 +958,7 @@ async def test_changed_scheduler_raises_exception(
     unused_tcp_port_factory: Callable,
     comp_run_metadata: RunMetadataDict,
     empty_hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     # change the scheduler (stop the current one and start another at the same address)
     scheduler_address = URL(dask_spec_local_cluster.scheduler_address)
@@ -1006,7 +1006,7 @@ async def test_get_tasks_status(
     fail_remote_fct: bool,
     comp_run_metadata: RunMetadataDict,
     empty_hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     # NOTE: this must be inlined so that the test works,
     # the dask-worker must be able to import the function
@@ -1089,7 +1089,7 @@ async def test_dask_sub_handlers(
     fake_task_handlers: TaskHandlers,
     comp_run_metadata: RunMetadataDict,
     empty_hardware_info: HardwareInfo,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     dask_client.register_handlers(fake_task_handlers)
     _DASK_START_EVENT = "start"
@@ -1164,7 +1164,7 @@ async def test_get_cluster_details(
     comp_run_metadata: RunMetadataDict,
     empty_hardware_info: HardwareInfo,
     faker: Faker,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     cluster_details = await dask_client.get_cluster_details()
     assert cluster_details
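All twelve test signatures above now annotate the resource_tracking_run_id fixture as RunID. The fixture itself is not part of this diff; a minimal sketch of what such a fixture could look like, assuming it lives in the suite's conftest and that RunID can wrap an arbitrary random string:

import pytest
from faker import Faker
from models_library.services import RunID  # same import path as dask_client.py

@pytest.fixture
def resource_tracking_run_id(faker: Faker) -> RunID:
    # Hypothetical fixture body: returns a RunID built from a random string,
    # mirroring the RunID("some_run_id") construction used elsewhere in this commit.
    return RunID(faker.pystr())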

services/director-v2/tests/unit/test_modules_osparc_variables.py

Lines changed: 2 additions & 1 deletion
@@ -18,6 +18,7 @@
 from fastapi import FastAPI
 from models_library.service_settings_labels import ComposeSpecLabelDict
 from models_library.services import ServiceKey, ServiceVersion
+from models_library.services_types import RunID
 from models_library.users import UserID
 from models_library.utils.specs_substitution import SubstitutionValue
 from models_library.utils.string_substitution import OSPARC_IDENTIFIER_PREFIX
@@ -184,7 +185,7 @@ async def test_resolve_and_substitute_session_variables_in_specs(
         product_name="a_product",
         project_id=faker.uuid4(cast_to=None),
         node_id=faker.uuid4(cast_to=None),
-        run_id="some_run_id",
+        run_id=RunID("some_run_id"),
     )
     print("REPLACED SPECS\n", replaced_specs)

services/director-v2/tests/unit/with_dbs/test_utils_dask.py

Lines changed: 2 additions & 1 deletion
@@ -34,6 +34,7 @@
 from models_library.docker import to_simcore_runtime_docker_label_key
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID, SimCoreFileLink, SimcoreS3FileID
+from models_library.services import RunID
 from models_library.users import UserID
 from pydantic import ByteSize, TypeAdapter
 from pydantic.networks import AnyUrl
@@ -647,7 +648,7 @@ async def test_compute_task_envs(
     run_metadata: RunMetadataDict,
     input_task_envs: ContainerEnvsDict,
     expected_computed_task_envs: ContainerEnvsDict,
-    resource_tracking_run_id: str,
+    resource_tracking_run_id: RunID,
 ):
     sleeper_task: CompTaskAtDB = published_project.tasks[1]
     sleeper_task.image.envs = input_task_envs
