Skip to content

Commit ea29f63

Browse files
committed
use begin
1 parent d1c9ac7 commit ea29f63

File tree

4 files changed

+57
-11
lines changed

4 files changed

+57
-11
lines changed

services/director-v2/tests/unit/_helpers.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818

1919
@dataclass
2020
class PublishedProject:
21+
user: dict[str, Any]
2122
project: ProjectAtDB
2223
pipeline: CompPipelineAtDB
2324
tasks: list[CompTaskAtDB]

services/director-v2/tests/unit/with_dbs/comp_scheduler/test_db_repositories_comp_runs.py

Lines changed: 43 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,19 @@
1+
# pylint: disable=no-value-for-parameter
2+
# pylint: disable=protected-access
3+
# pylint: disable=redefined-outer-name
4+
# pylint: disable=too-many-arguments
5+
# pylint: disable=unused-argument
6+
# pylint: disable=unused-variable
7+
8+
from typing import Awaitable, Callable
9+
110
import pytest
11+
from _helpers import PublishedProject
12+
from faker import Faker
213
from models_library.projects import ProjectID
314
from models_library.users import UserID
415
from simcore_service_director_v2.core.errors import ComputationalRunNotFoundError
16+
from simcore_service_director_v2.models.comp_runs import CompRunsAtDB
517
from simcore_service_director_v2.modules.db.repositories.comp_runs import (
618
CompRunsRepository,
719
)
@@ -14,9 +26,38 @@
1426
]
1527

1628

17-
async def test_get(aiopg_engine, user_id: UserID, project_id: ProjectID):
29+
@pytest.fixture
30+
def fake_user_id(faker: Faker) -> UserID:
31+
return faker.pyint(min_value=1)
32+
33+
34+
@pytest.fixture
35+
def fake_project_id(faker: Faker) -> ProjectID:
36+
return ProjectID(f"{faker.uuid4(cast_to=None)}")
37+
38+
39+
async def test_get(
40+
aiopg_engine,
41+
fake_user_id: UserID,
42+
fake_project_id: ProjectID,
43+
publish_project: Callable[[], Awaitable[PublishedProject]],
44+
create_comp_run: Callable[..., Awaitable[CompRunsAtDB]],
45+
):
46+
with pytest.raises(ComputationalRunNotFoundError):
47+
await CompRunsRepository(aiopg_engine).get(fake_user_id, fake_project_id)
48+
49+
published_project = await publish_project()
50+
assert published_project.project.prj_owner
51+
# there is still no comp run created
1852
with pytest.raises(ComputationalRunNotFoundError):
19-
await CompRunsRepository(aiopg_engine).get(user_id, project_id)
53+
await CompRunsRepository(aiopg_engine).get(
54+
published_project.project.prj_owner, published_project.project.uuid
55+
)
56+
57+
comp_run = await create_comp_run(published_project.user, published_project.project)
58+
await CompRunsRepository(aiopg_engine).get(
59+
published_project.project.prj_owner, published_project.project.uuid
60+
)
2061

2162

2263
async def test_list():

services/director-v2/tests/unit/with_dbs/comp_scheduler/test_scheduler_dask.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1046,6 +1046,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
10461046
assert messages[0].node_id == exp_started_task.node_id
10471047

10481048
return RunningProject(
1049+
published_project.user,
10491050
published_project.project,
10501051
published_project.pipeline,
10511052
tasks_in_db,

services/director-v2/tests/unit/with_dbs/conftest.py

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ async def _(**pipeline_kwargs) -> CompPipelineAtDB:
6767
yield _
6868

6969
# cleanup
70-
async with sqlalchemy_async_engine.connect() as conn:
70+
async with sqlalchemy_async_engine.begin() as conn:
7171
await conn.execute(
7272
comp_pipeline.delete().where(
7373
comp_pipeline.c.project_id.in_(created_pipeline_ids)
@@ -123,7 +123,7 @@ async def _(
123123
),
124124
"node_class": to_node_class(node_data.key),
125125
"internal_id": internal_id + 1,
126-
"submit": datetime.datetime.now(tz=datetime.UTC),
126+
"submit": datetime.datetime.now(),
127127
"job_id": generate_dask_job_id(
128128
service_key=node_data.key,
129129
service_version=node_data.version,
@@ -133,7 +133,7 @@ async def _(
133133
),
134134
}
135135
task_config.update(**overrides_kwargs)
136-
async with sqlalchemy_async_engine.connect() as conn:
136+
async with sqlalchemy_async_engine.begin() as conn:
137137
result = await conn.execute(
138138
comp_tasks.insert()
139139
.values(**task_config)
@@ -147,7 +147,7 @@ async def _(
147147
yield _
148148

149149
# cleanup
150-
async with sqlalchemy_async_engine.connect() as conn:
150+
async with sqlalchemy_async_engine.begin() as conn:
151151
await conn.execute(
152152
comp_tasks.delete().where(comp_tasks.c.task_id.in_(created_task_ids))
153153
)
@@ -197,14 +197,14 @@ async def _(
197197
) -> CompRunsAtDB:
198198
run_config = {
199199
"project_uuid": f"{project.uuid}",
200-
"user_id": f"{user['id']}",
200+
"user_id": user["id"],
201201
"iteration": 1,
202202
"result": StateType.NOT_STARTED,
203203
"metadata": run_metadata,
204204
"use_on_demand_clusters": False,
205205
}
206206
run_config.update(**run_kwargs)
207-
async with sqlalchemy_async_engine.connect() as conn:
207+
async with sqlalchemy_async_engine.begin() as conn:
208208
result = await conn.execute(
209209
comp_runs.insert()
210210
.values(**jsonable_encoder(run_config))
@@ -217,7 +217,7 @@ async def _(
217217
yield _
218218

219219
# cleanup
220-
async with sqlalchemy_async_engine.connect() as conn:
220+
async with sqlalchemy_async_engine.begin() as conn:
221221
await conn.execute(
222222
comp_runs.delete().where(comp_runs.c.run_id.in_(created_run_ids))
223223
)
@@ -242,7 +242,7 @@ async def _(user: dict[str, Any], **cluster_kwargs) -> Cluster:
242242
new_cluster = Cluster.model_validate(cluster_config)
243243
assert new_cluster
244244

245-
async with sqlalchemy_async_engine.connect() as conn:
245+
async with sqlalchemy_async_engine.begin() as conn:
246246
# insert basic cluster
247247
created_cluster = (
248248
await conn.execute(
@@ -298,7 +298,7 @@ async def _(user: dict[str, Any], **cluster_kwargs) -> Cluster:
298298
yield _
299299

300300
# cleanup
301-
async with sqlalchemy_async_engine.connect() as conn:
301+
async with sqlalchemy_async_engine.begin() as conn:
302302
await conn.execute(
303303
clusters.delete().where(clusters.c.id.in_(created_cluster_ids))
304304
)
@@ -318,6 +318,7 @@ async def publish_project(
318318
async def _() -> PublishedProject:
319319
created_project = await project(user, workbench=fake_workbench_without_outputs)
320320
return PublishedProject(
321+
user=user,
321322
project=created_project,
322323
pipeline=await create_pipeline(
323324
project_id=f"{created_project.uuid}",
@@ -352,6 +353,7 @@ async def running_project(
352353
created_project = await project(user, workbench=fake_workbench_without_outputs)
353354
now_time = arrow.utcnow().datetime
354355
return RunningProject(
356+
user=user,
355357
project=created_project,
356358
pipeline=await create_pipeline(
357359
project_id=f"{created_project.uuid}",
@@ -388,6 +390,7 @@ async def running_project_mark_for_cancellation(
388390
created_project = await project(user, workbench=fake_workbench_without_outputs)
389391
now_time = arrow.utcnow().datetime
390392
return RunningProject(
393+
user=user,
391394
project=created_project,
392395
pipeline=await create_pipeline(
393396
project_id=f"{created_project.uuid}",

0 commit comments

Comments (0)