Skip to content

Commit ed9b0b7

Browse files
committed
fix some tests
1 parent cc8bc28 commit ed9b0b7

File tree

4 files changed

+22
-19
lines changed

4 files changed

+22
-19
lines changed

services/director-v2/src/simcore_service_director_v2/modules/dask_client.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -359,7 +359,7 @@ async def send_computation_tasks(
359359
try:
360360
# This instance is created only once so it can be reused in calls below
361361
node_ports = await dask_utils.create_node_ports(
362-
db_engine=self.app.state.engine,
362+
db_engine=self.app.state.asyncpg_engine,
363363
user_id=user_id,
364364
project_id=project_id,
365365
node_id=node_id,
@@ -439,14 +439,14 @@ async def get_tasks_status(self, job_ids: list[str]) -> list[DaskClientTaskState
439439
def _get_pipeline_statuses(
440440
dask_scheduler: distributed.Scheduler,
441441
) -> dict[dask.typing.Key, DaskSchedulerTaskState | None]:
442-
statuses: dict[
443-
dask.typing.Key, DaskSchedulerTaskState | None
444-
] = dask_scheduler.get_task_status(keys=job_ids)
442+
statuses: dict[dask.typing.Key, DaskSchedulerTaskState | None] = (
443+
dask_scheduler.get_task_status(keys=job_ids)
444+
)
445445
return statuses
446446

447-
task_statuses: dict[
448-
dask.typing.Key, DaskSchedulerTaskState | None
449-
] = await self.backend.client.run_on_scheduler(_get_pipeline_statuses)
447+
task_statuses: dict[dask.typing.Key, DaskSchedulerTaskState | None] = (
448+
await self.backend.client.run_on_scheduler(_get_pipeline_statuses)
449+
)
450450
assert isinstance(task_statuses, dict) # nosec
451451

452452
_logger.debug("found dask task statuses: %s", f"{task_statuses=}")
@@ -578,10 +578,10 @@ def _get_worker_used_resources(
578578

579579
with log_catch(_logger, reraise=False):
580580
# NOTE: this runs directly on the dask-scheduler and may raise exceptions
581-
used_resources_per_worker: dict[
582-
str, dict[str, Any]
583-
] = await dask_utils.wrap_client_async_routine(
584-
self.backend.client.run_on_scheduler(_get_worker_used_resources)
581+
used_resources_per_worker: dict[str, dict[str, Any]] = (
582+
await dask_utils.wrap_client_async_routine(
583+
self.backend.client.run_on_scheduler(_get_worker_used_resources)
584+
)
585585
)
586586

587587
# let's update the scheduler info, with default to 0s since sometimes

services/director-v2/tests/unit/conftest.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@
4848
def disable_postgres(mocker) -> None:
4949
def mock_setup(app: FastAPI, *args, **kwargs) -> None:
5050
app.state.engine = mock.AsyncMock()
51+
app.state.asyncpg_engine = mock.AsyncMock()
5152

5253
mocker.patch("simcore_service_director_v2.modules.db.setup", side_effect=mock_setup)
5354

@@ -152,7 +153,7 @@ def mock_service_inspect(
152153

153154
@pytest.fixture
154155
def scheduler_data_from_service_inspect(
155-
mock_service_inspect: Mapping[str, Any]
156+
mock_service_inspect: Mapping[str, Any],
156157
) -> SchedulerData:
157158
return SchedulerData.from_service_inspect(mock_service_inspect)
158159

services/director-v2/tests/unit/test_modules_dask_client.py

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -189,7 +189,7 @@ async def dask_client(
189189
}[request.param]()
190190

191191
try:
192-
assert client.app.state.engine is not None
192+
assert client.app.state.asyncpg_engine is not None
193193

194194
# check we can run some simple python script
195195
def _square(x):
@@ -207,8 +207,8 @@ def neg(x):
207207
result = await future
208208
assert result == -285
209209
except AttributeError:
210-
# enforces existence of 'app.state.engine' and sets to None
211-
client.app.state.engine = None
210+
# enforces existence of 'app.state.asyncpg_engine' and sets to None
211+
client.app.state.asyncpg_engine = None
212212

213213
return client
214214

@@ -390,7 +390,9 @@ def fct_that_raise_exception():
390390
) # type: ignore
391391
assert task_exception
392392
assert isinstance(task_exception, exc)
393-
task_traceback = await future.traceback(timeout=_ALLOW_TIME_FOR_GATEWAY_TO_CREATE_WORKERS) # type: ignore
393+
task_traceback = await future.traceback(
394+
timeout=_ALLOW_TIME_FOR_GATEWAY_TO_CREATE_WORKERS
395+
) # type: ignore
394396
assert task_traceback
395397
trace = traceback.format_exception(task_exception)
396398
assert trace
@@ -625,7 +627,9 @@ def fake_sidecar_fct(
625627
assert published_computation_task[0].job_id in list_of_persisted_datasets
626628
assert list_of_persisted_datasets[0] == published_computation_task[0].job_id
627629
# get the persisted future from the scheduler back
628-
task_future = await dask_client.backend.client.get_dataset(name=published_computation_task[0].job_id) # type: ignore
630+
task_future = await dask_client.backend.client.get_dataset(
631+
name=published_computation_task[0].job_id
632+
) # type: ignore
629633
assert task_future
630634
assert isinstance(task_future, distributed.Future)
631635
assert task_future.key == published_computation_task[0].job_id

services/director-v2/tests/unit/test_modules_osparc_variables.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -280,7 +280,6 @@ def compose_spec():
280280
def test_auto_inject_environments_added_to_all_services_in_compose(
281281
compose_spec: ComposeSpecLabelDict,
282282
):
283-
284283
before = deepcopy(compose_spec)
285284

286285
after = auto_inject_environments(compose_spec)
@@ -290,7 +289,6 @@ def test_auto_inject_environments_added_to_all_services_in_compose(
290289

291290
auto_injected_envs = set(_NEW_ENVIRONMENTS.keys())
292291
for name, service in compose_spec.get("services", {}).items():
293-
294292
# all services have environment specs
295293
assert service["environment"], f"expected in {name} service"
296294

0 commit comments

Comments
 (0)