
Commit 358e72b

Merge branch 'master' into mai/pytest-upgrade-repo-wide
2 parents a95d363 + 4289fb0 commit 358e72b


51 files changed (+287 −498 lines)


ci/helpers/ensure_python_pip.bash

Lines changed: 0 additions & 26 deletions
This file was deleted.

packages/postgres-database/docker/Dockerfile

Lines changed: 0 additions & 5 deletions
@@ -32,11 +32,6 @@ COPY --from=uv_build /uv /uvx /bin/
 # NOTE: python virtualenv is used here such that installed packages may be moved to production image easily by copying the venv
 RUN uv venv "${VIRTUAL_ENV}"
 
-RUN --mount=type=cache,target=/root/.cache/uv \
-  uv pip install --upgrade \
-  wheel \
-  setuptools
-
 ARG GIT_BRANCH
 ARG GIT_REPOSITORY
 

packages/postgres-database/scripts/erd/Dockerfile

Lines changed: 0 additions & 5 deletions
@@ -20,11 +20,6 @@ RUN apt-get update \
   && apt-get clean
 
 
-RUN --mount=type=cache,target=/root/.cache/uv \
-  uv pip install --upgrade \
-  wheel \
-  setuptools
-
 
 # devenv
 COPY requirements.txt requirements.txt

packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py

Lines changed: 28 additions & 23 deletions
@@ -13,7 +13,6 @@
 from models_library.products import ProductName
 from models_library.projects import ProjectAtDB, ProjectID
 from models_library.projects_nodes_io import NodeID
-from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan
 from simcore_postgres_database.models.comp_pipeline import StateType, comp_pipeline
 from simcore_postgres_database.models.comp_tasks import comp_tasks
 from simcore_postgres_database.models.products import products
@@ -27,14 +26,16 @@
 )
 from sqlalchemy.ext.asyncio import AsyncEngine
 
+from .helpers.postgres_tools import insert_and_get_row_lifespan
+
 
 @pytest.fixture()
 def create_registered_user(
     postgres_db: sa.engine.Engine, faker: Faker
 ) -> Iterator[Callable[..., dict]]:
     created_user_ids = []
 
-    def creator(**user_kwargs) -> dict[str, Any]:
+    def _(**user_kwargs) -> dict[str, Any]:
         with postgres_db.connect() as con:
             # removes all users before continuing
             user_config = {
@@ -60,15 +61,15 @@ def creator(**user_kwargs) -> dict[str, Any]:
             created_user_ids.append(user["id"])
             return dict(user._asdict())
 
-    yield creator
+    yield _
 
     with postgres_db.connect() as con:
         con.execute(users.delete().where(users.c.id.in_(created_user_ids)))
     print(f"<-- deleted users {created_user_ids=}")
 
 
 @pytest.fixture
-async def product_db(
+async def with_product(
     sqlalchemy_async_engine: AsyncEngine, product: dict[str, Any]
 ) -> AsyncIterator[dict[str, Any]]:
     async with insert_and_get_row_lifespan(  # pylint:disable=contextmanager-generator-missing-cleanup
@@ -81,12 +82,12 @@ async def product_db(
 
 
 @pytest.fixture
-async def project(
+async def create_project(
     sqlalchemy_async_engine: AsyncEngine, faker: Faker, product_name: ProductName
 ) -> AsyncIterator[Callable[..., Awaitable[ProjectAtDB]]]:
     created_project_ids: list[str] = []
 
-    async def creator(
+    async def _(
         user: dict[str, Any],
         *,
         project_nodes_overrides: dict[str, Any] | None = None,
@@ -140,7 +141,7 @@ async def creator(
         created_project_ids.append(f"{inserted_project.uuid}")
         return inserted_project
 
-    yield creator
+    yield _
 
     # cleanup
     async with sqlalchemy_async_engine.begin() as con:
@@ -151,18 +152,20 @@ async def creator(
 
 
 @pytest.fixture
-def pipeline(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., dict[str, Any]]]:
+async def create_pipeline(
+    sqlalchemy_async_engine: AsyncEngine,
+) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]:
     created_pipeline_ids: list[str] = []
 
-    def creator(**pipeline_kwargs) -> dict[str, Any]:
+    async def _(**pipeline_kwargs) -> dict[str, Any]:
         pipeline_config = {
             "project_id": f"{uuid4()}",
             "dag_adjacency_list": {},
             "state": StateType.NOT_STARTED,
         }
         pipeline_config.update(**pipeline_kwargs)
-        with postgres_db.connect() as conn:
-            result = conn.execute(
+        async with sqlalchemy_async_engine.begin() as conn:
+            result = await conn.execute(
                 comp_pipeline.insert()
                 .values(**pipeline_config)
                 .returning(sa.literal_column("*"))
@@ -172,25 +175,27 @@ def creator(**pipeline_kwargs) -> dict[str, Any]:
         created_pipeline_ids.append(new_pipeline["project_id"])
         return new_pipeline
 
-    yield creator
+    yield _
 
     # cleanup
-    with postgres_db.connect() as conn:
-        conn.execute(
+    async with sqlalchemy_async_engine.begin() as conn:
+        await conn.execute(
            comp_pipeline.delete().where(
                 comp_pipeline.c.project_id.in_(created_pipeline_ids)
             )
        )
 
 
 @pytest.fixture
-def comp_task(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., dict[str, Any]]]:
+async def create_comp_task(
+    sqlalchemy_async_engine: AsyncEngine,
+) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]:
     created_task_ids: list[int] = []
 
-    def creator(project_id: ProjectID, **task_kwargs) -> dict[str, Any]:
+    async def _(project_id: ProjectID, **task_kwargs) -> dict[str, Any]:
         task_config = {"project_id": f"{project_id}"} | task_kwargs
-        with postgres_db.connect() as conn:
-            result = conn.execute(
+        async with sqlalchemy_async_engine.begin() as conn:
+            result = await conn.execute(
                 comp_tasks.insert()
                 .values(**task_config)
                 .returning(sa.literal_column("*"))
@@ -200,11 +205,11 @@ def creator(project_id: ProjectID, **task_kwargs) -> dict[str, Any]:
         created_task_ids.append(new_task["task_id"])
         return new_task
 
-    yield creator
+    yield _
 
     # cleanup
-    with postgres_db.connect() as conn:
-        conn.execute(
+    async with sqlalchemy_async_engine.begin() as conn:
+        await conn.execute(
             comp_tasks.delete().where(comp_tasks.c.task_id.in_(created_task_ids))
         )
 
@@ -219,7 +224,7 @@ def grant_service_access_rights(
     """
     created_entries: list[tuple[str, str, int, str]] = []
 
-    def creator(
+    def _(
         *,
         service_key: str,
         service_version: str,
@@ -263,7 +268,7 @@ def creator(
         # Convert row to dict
         return dict(row._asdict())
 
-    yield creator
+    yield _
 
     # Cleanup all created entries
     with postgres_db.begin() as conn:
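
A minimal, hypothetical usage sketch of the renamed async factory fixtures (not part of this commit; it assumes pytest-asyncio and that node_id is a required comp_tasks column supplied through **task_kwargs):

from uuid import uuid4

import pytest


@pytest.mark.asyncio
async def test_creates_pipeline_and_task(
    create_pipeline,  # async factory fixture defined above
    create_comp_task,  # async factory fixture defined above
):
    # inserts a comp_pipeline row through the shared AsyncEngine
    pipeline = await create_pipeline()
    # inserts a comp_tasks row for the same project; node_id is illustrative
    task = await create_comp_task(
        project_id=pipeline["project_id"],
        node_id=f"{uuid4()}",
    )
    assert task["project_id"] == pipeline["project_id"]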

packages/service-integration/Dockerfile

Lines changed: 0 additions & 7 deletions
@@ -74,13 +74,6 @@ COPY --from=uv_build /uv /uvx /bin/
 RUN uv venv "${VIRTUAL_ENV}"
 
 
-
-RUN --mount=type=cache,target=/root/.cache/uv \
-  uv pip install --upgrade \
-  pip~=24.0 \
-  wheel \
-  setuptools
-
 WORKDIR /build/packages/service-integration
 
 RUN \

packages/simcore-sdk/tests/conftest.py

Lines changed: 2 additions & 2 deletions
@@ -23,6 +23,7 @@
 pytest_plugins = [
     "pytest_simcore.aws_s3_service",
     "pytest_simcore.aws_server",
+    "pytest_simcore.db_entries_mocks",
     "pytest_simcore.disk_usage_monitoring",
     "pytest_simcore.docker_compose",
     "pytest_simcore.docker_swarm",
@@ -66,8 +67,7 @@ def empty_configuration_file() -> Path:
 @pytest.fixture
 def node_ports_config(
     postgres_host_config: PostgresTestConfig, minio_s3_settings_envs: EnvVarsDict
-) -> None:
-    ...
+) -> None: ...
 
 
 @pytest.fixture
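
Listing pytest_simcore.db_entries_mocks in pytest_plugins makes its fixtures resolvable by name throughout the simcore-sdk test suite without an explicit import. A small, hypothetical sketch of a test relying on that registration (not part of this commit; assumes pytest-asyncio):

import pytest


@pytest.mark.asyncio
async def test_plugin_fixture_resolves_by_name(create_pipeline):
    # create_pipeline comes from the pytest_simcore.db_entries_mocks plugin
    # registered in the pytest_plugins list above
    pipeline = await create_pipeline(dag_adjacency_list={"node-a": []})
    assert pipeline["project_id"]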

packages/simcore-sdk/tests/integration/conftest.py

Lines changed: 13 additions & 42 deletions
@@ -22,7 +22,6 @@
 from settings_library.aws_s3_cli import AwsS3CliSettings
 from settings_library.r_clone import RCloneSettings, S3Provider
 from settings_library.s3 import S3Settings
-from simcore_postgres_database.models.comp_pipeline import comp_pipeline
 from simcore_postgres_database.models.comp_tasks import comp_tasks
 from simcore_postgres_database.models.file_meta_data import file_meta_data
 from simcore_postgres_database.models.projects import projects
@@ -100,17 +99,17 @@ def _create(key: str, file_path: Path) -> SimcoreS3FileID:
 
 
 @pytest.fixture()
-def default_configuration(
+async def default_configuration(
     node_ports_config: None,
-    create_pipeline: Callable[[str], str],
+    create_pipeline: Callable[..., Awaitable[dict[str, Any]]],
     create_task: Callable[..., str],
     default_configuration_file: Path,
     project_id: str,
     node_uuid: str,
 ) -> dict[str, Any]:
     # prepare database with default configuration
     json_configuration = default_configuration_file.read_text()
-    create_pipeline(project_id)
+    await create_pipeline(project_id=project_id)
     return _set_configuration(create_task, project_id, node_uuid, json_configuration)
 
 
@@ -167,15 +166,15 @@ async def _create(file_path: Path) -> dict[str, Any]:
 
 
 @pytest.fixture()
-def create_special_configuration(
+async def create_special_configuration(
     node_ports_config: None,
-    create_pipeline: Callable[[str], str],
+    create_pipeline: Callable[..., Awaitable[dict[str, Any]]],
     create_task: Callable[..., str],
     empty_configuration_file: Path,
     project_id: str,
     node_uuid: str,
-) -> Callable:
-    def _create(
+) -> Callable[..., Awaitable[tuple[dict, str, str]]]:
+    async def _create(
         inputs: list[tuple[str, str, Any]] | None = None,
         outputs: list[tuple[str, str, Any]] | None = None,
         project_id: str = project_id,
@@ -184,7 +183,7 @@ def _create(
         config_dict = json.loads(empty_configuration_file.read_text())
         _assign_config(config_dict, "inputs", inputs if inputs else [])
         _assign_config(config_dict, "outputs", outputs if outputs else [])
-        project_id = create_pipeline(project_id)
+        await create_pipeline(project_id=project_id)
         config_dict = _set_configuration(
             create_task, project_id, node_id, json.dumps(config_dict)
         )
@@ -194,13 +193,13 @@ def _create(
 
 
 @pytest.fixture()
-def create_2nodes_configuration(
+async def create_2nodes_configuration(
     node_ports_config: None,
-    create_pipeline: Callable[[str], str],
+    create_pipeline: Callable[..., Awaitable[dict[str, Any]]],
     create_task: Callable[..., str],
     empty_configuration_file: Path,
-) -> Callable:
-    def _create(
+) -> Callable[..., Awaitable[tuple[dict, str, str]]]:
+    async def _create(
         prev_node_inputs: list[tuple[str, str, Any]],
         prev_node_outputs: list[tuple[str, str, Any]],
         inputs: list[tuple[str, str, Any]],
@@ -209,7 +208,7 @@ def _create(
         previous_node_id: str,
         node_id: str,
     ) -> tuple[dict, str, str]:
-        create_pipeline(project_id)
+        await create_pipeline(project_id=project_id)
 
         # create previous node
         previous_config_dict = json.loads(empty_configuration_file.read_text())
@@ -241,34 +240,6 @@ def _create(
     return _create
 
 
-@pytest.fixture
-def create_pipeline(postgres_db: sa.engine.Engine) -> Iterator[Callable[[str], str]]:
-    created_pipeline_ids: list[str] = []
-
-    def _create(project_id: str) -> str:
-        with postgres_db.connect() as conn:
-            result = conn.execute(
-                comp_pipeline.insert()  # pylint: disable=no-value-for-parameter
-                .values(project_id=project_id)
-                .returning(comp_pipeline.c.project_id)
-            )
-            row = result.first()
-            assert row
-            new_pipeline_id = row[comp_pipeline.c.project_id]
-            created_pipeline_ids.append(f"{new_pipeline_id}")
-            return new_pipeline_id
-
-    yield _create
-
-    # cleanup
-    with postgres_db.connect() as conn:
-        conn.execute(
-            comp_pipeline.delete().where(  # pylint: disable=no-value-for-parameter
-                comp_pipeline.c.project_id.in_(created_pipeline_ids)
-            )
-        )
-
-
 @pytest.fixture
 def create_task(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., str]]:
     created_task_ids: list[int] = []

packages/simcore-sdk/tests/integration/test_node_ports_common_dbmanager.py

Lines changed: 3 additions & 3 deletions
@@ -3,7 +3,7 @@
 # pylint:disable=redefined-outer-name
 
 import json
-from collections.abc import Callable
+from collections.abc import Awaitable, Callable
 from pathlib import Path
 
 from simcore_sdk.node_ports_common.dbmanager import DBManager
@@ -35,11 +35,11 @@ async def test_db_manager_write_config(
     project_id: str,
     node_uuid: str,
     node_ports_config: None,
-    create_special_configuration: Callable,
+    create_special_configuration: Callable[..., Awaitable[tuple[dict, str, str]]],
     default_configuration_file: Path,
 ):
     # create an empty config
-    create_special_configuration()
+    await create_special_configuration()
     # read the default config
     json_configuration = default_configuration_file.read_text()
     # write the default config to the database
