51 changes: 28 additions & 23 deletions packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py
@@ -13,7 +13,6 @@
from models_library.products import ProductName
from models_library.projects import ProjectAtDB, ProjectID
from models_library.projects_nodes_io import NodeID
-from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan
from simcore_postgres_database.models.comp_pipeline import StateType, comp_pipeline
from simcore_postgres_database.models.comp_tasks import comp_tasks
from simcore_postgres_database.models.products import products
@@ -27,14 +26,16 @@
)
from sqlalchemy.ext.asyncio import AsyncEngine

+from .helpers.postgres_tools import insert_and_get_row_lifespan


@pytest.fixture()
def create_registered_user(
postgres_db: sa.engine.Engine, faker: Faker
) -> Iterator[Callable[..., dict]]:
created_user_ids = []

-    def creator(**user_kwargs) -> dict[str, Any]:
+    def _(**user_kwargs) -> dict[str, Any]:
with postgres_db.connect() as con:
# removes all users before continuing
user_config = {
@@ -60,15 +61,15 @@ def creator(**user_kwargs) -> dict[str, Any]:
created_user_ids.append(user["id"])
return dict(user._asdict())

-    yield creator
+    yield _

with postgres_db.connect() as con:
con.execute(users.delete().where(users.c.id.in_(created_user_ids)))
print(f"<-- deleted users {created_user_ids=}")


@pytest.fixture
-async def product_db(
+async def with_product(
sqlalchemy_async_engine: AsyncEngine, product: dict[str, Any]
) -> AsyncIterator[dict[str, Any]]:
async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup
@@ -81,12 +82,12 @@


@pytest.fixture
-async def project(
+async def create_project(
sqlalchemy_async_engine: AsyncEngine, faker: Faker, product_name: ProductName
) -> AsyncIterator[Callable[..., Awaitable[ProjectAtDB]]]:
created_project_ids: list[str] = []

-    async def creator(
+    async def _(
user: dict[str, Any],
*,
project_nodes_overrides: dict[str, Any] | None = None,
@@ -140,7 +141,7 @@ async def creator(
created_project_ids.append(f"{inserted_project.uuid}")
return inserted_project

-    yield creator
+    yield _

# cleanup
async with sqlalchemy_async_engine.begin() as con:
@@ -151,18 +152,20 @@


@pytest.fixture
-def pipeline(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., dict[str, Any]]]:
+async def create_pipeline(
+    sqlalchemy_async_engine: AsyncEngine,
+) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]:
created_pipeline_ids: list[str] = []

-    def creator(**pipeline_kwargs) -> dict[str, Any]:
+    async def _(**pipeline_kwargs) -> dict[str, Any]:
pipeline_config = {
"project_id": f"{uuid4()}",
"dag_adjacency_list": {},
"state": StateType.NOT_STARTED,
}
pipeline_config.update(**pipeline_kwargs)
-        with postgres_db.connect() as conn:
-            result = conn.execute(
+        async with sqlalchemy_async_engine.begin() as conn:
+            result = await conn.execute(
comp_pipeline.insert()
.values(**pipeline_config)
.returning(sa.literal_column("*"))
@@ -172,25 +175,27 @@ def creator(**pipeline_kwargs) -> dict[str, Any]:
created_pipeline_ids.append(new_pipeline["project_id"])
return new_pipeline

-    yield creator
+    yield _

# cleanup
-    with postgres_db.connect() as conn:
-        conn.execute(
+    async with sqlalchemy_async_engine.begin() as conn:
+        await conn.execute(
comp_pipeline.delete().where(
comp_pipeline.c.project_id.in_(created_pipeline_ids)
)
)
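Since the factory is now a coroutine, callers await it (under pytest-asyncio). A rough usage sketch; the assertions simply restate the defaults set above:

    from simcore_postgres_database.models.comp_pipeline import StateType

    async def test_creates_pipeline_row(create_pipeline):
        # no overrides: fresh project_id, empty DAG, NOT_STARTED state
        pipeline = await create_pipeline()
        assert pipeline["state"] == StateType.NOT_STARTED
        assert pipeline["dag_adjacency_list"] == {}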


@pytest.fixture
-def comp_task(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., dict[str, Any]]]:
+async def create_comp_task(
+    sqlalchemy_async_engine: AsyncEngine,
+) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]:
created_task_ids: list[int] = []

-    def creator(project_id: ProjectID, **task_kwargs) -> dict[str, Any]:
+    async def _(project_id: ProjectID, **task_kwargs) -> dict[str, Any]:
task_config = {"project_id": f"{project_id}"} | task_kwargs
-        with postgres_db.connect() as conn:
-            result = conn.execute(
+        async with sqlalchemy_async_engine.begin() as conn:
+            result = await conn.execute(
comp_tasks.insert()
.values(**task_config)
.returning(sa.literal_column("*"))
@@ -200,11 +205,11 @@ def creator(project_id: ProjectID, **task_kwargs) -> dict[str, Any]:
created_task_ids.append(new_task["task_id"])
return new_task

-    yield creator
+    yield _

# cleanup
-    with postgres_db.connect() as conn:
-        conn.execute(
+    async with sqlalchemy_async_engine.begin() as conn:
+        await conn.execute(
comp_tasks.delete().where(comp_tasks.c.task_id.in_(created_task_ids))
)
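Usage mirrors `create_pipeline`. In this sketch the `node_id` keyword is an assumption about the table's required columns, forwarded via `**task_kwargs`:

    from uuid import uuid4

    async def test_creates_comp_task_row(create_comp_task):
        project_id = uuid4()
        # extra keywords (here node_id, an assumed column) become column overrides
        task = await create_comp_task(project_id=project_id, node_id=f"{uuid4()}")
        assert task["project_id"] == f"{project_id}"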

@@ -219,7 +224,7 @@ def grant_service_access_rights(
"""
created_entries: list[tuple[str, str, int, str]] = []

-    def creator(
+    def _(
*,
service_key: str,
service_version: str,
@@ -263,7 +268,7 @@ def creator(
# Convert row to dict
return dict(row._asdict())

-    yield creator
+    yield _

# Cleanup all created entries
with postgres_db.begin() as conn:
4 changes: 2 additions & 2 deletions packages/simcore-sdk/tests/conftest.py
@@ -23,6 +23,7 @@
pytest_plugins = [
"pytest_simcore.aws_s3_service",
"pytest_simcore.aws_server",
"pytest_simcore.db_entries_mocks",
"pytest_simcore.disk_usage_monitoring",
"pytest_simcore.docker_compose",
"pytest_simcore.docker_swarm",
@@ -66,8 +67,7 @@ def empty_configuration_file() -> Path:
@pytest.fixture
def node_ports_config(
postgres_host_config: PostgresTestConfig, minio_s3_settings_envs: EnvVarsDict
-) -> None:
-    ...
+) -> None: ...


@pytest.fixture
55 changes: 13 additions & 42 deletions packages/simcore-sdk/tests/integration/conftest.py
@@ -22,7 +22,6 @@
from settings_library.aws_s3_cli import AwsS3CliSettings
from settings_library.r_clone import RCloneSettings, S3Provider
from settings_library.s3 import S3Settings
-from simcore_postgres_database.models.comp_pipeline import comp_pipeline
from simcore_postgres_database.models.comp_tasks import comp_tasks
from simcore_postgres_database.models.file_meta_data import file_meta_data
from simcore_postgres_database.models.projects import projects
@@ -100,17 +99,17 @@ def _create(key: str, file_path: Path) -> SimcoreS3FileID:


@pytest.fixture()
-def default_configuration(
+async def default_configuration(
node_ports_config: None,
-    create_pipeline: Callable[[str], str],
+    create_pipeline: Callable[..., Awaitable[dict[str, Any]]],
create_task: Callable[..., str],
default_configuration_file: Path,
project_id: str,
node_uuid: str,
) -> dict[str, Any]:
# prepare database with default configuration
json_configuration = default_configuration_file.read_text()
-    create_pipeline(project_id)
+    await create_pipeline(project_id=project_id)
return _set_configuration(create_task, project_id, node_uuid, json_configuration)
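Consuming tests keep their signature; pytest-asyncio resolves the coroutine fixture before the test body runs. A sketch (the assertion is illustrative):

    async def test_reads_default_configuration(default_configuration: dict[str, Any]):
        # the fixture value is the task configuration inserted for project_id/node_uuid
        assert default_configuration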


@@ -167,15 +166,15 @@ async def _create(file_path: Path) -> dict[str, Any]:


@pytest.fixture()
-def create_special_configuration(
+async def create_special_configuration(
node_ports_config: None,
-    create_pipeline: Callable[[str], str],
+    create_pipeline: Callable[..., Awaitable[dict[str, Any]]],
create_task: Callable[..., str],
empty_configuration_file: Path,
project_id: str,
node_uuid: str,
-) -> Callable:
-    def _create(
+) -> Callable[..., Awaitable[tuple[dict, str, str]]]:
+    async def _create(
inputs: list[tuple[str, str, Any]] | None = None,
outputs: list[tuple[str, str, Any]] | None = None,
project_id: str = project_id,
@@ -184,7 +183,7 @@ def _create(
config_dict = json.loads(empty_configuration_file.read_text())
_assign_config(config_dict, "inputs", inputs if inputs else [])
_assign_config(config_dict, "outputs", outputs if outputs else [])
-        project_id = create_pipeline(project_id)
+        await create_pipeline(project_id=project_id)
config_dict = _set_configuration(
create_task, project_id, node_id, json.dumps(config_dict)
)
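A caller after this change might look as follows; the `(key, type, value)` triples match what `_assign_config` expects, and the concrete port values are assumptions:

    async def test_with_special_configuration(create_special_configuration):
        config, project_id, node_id = await create_special_configuration(
            inputs=[("in_1", "integer", 42)],
            outputs=[("out_1", "integer", None)],
        )
        assert project_id and node_id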
@@ -194,13 +193,13 @@


@pytest.fixture()
-def create_2nodes_configuration(
+async def create_2nodes_configuration(
node_ports_config: None,
-    create_pipeline: Callable[[str], str],
+    create_pipeline: Callable[..., Awaitable[dict[str, Any]]],
create_task: Callable[..., str],
empty_configuration_file: Path,
-) -> Callable:
-    def _create(
+) -> Callable[..., Awaitable[tuple[dict, str, str]]]:
+    async def _create(
prev_node_inputs: list[tuple[str, str, Any]],
prev_node_outputs: list[tuple[str, str, Any]],
inputs: list[tuple[str, str, Any]],
@@ -209,7 +208,7 @@ def _create(
previous_node_id: str,
node_id: str,
) -> tuple[dict, str, str]:
-        create_pipeline(project_id)
+        await create_pipeline(project_id=project_id)

# create previous node
previous_config_dict = json.loads(empty_configuration_file.read_text())
@@ -241,34 +240,6 @@ def _create(
return _create
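The two-node variant is awaited the same way; every argument below is an illustrative placeholder for the shapes the factory expects:

    async def test_links_two_nodes(create_2nodes_configuration, faker):
        config, project_id, node_id = await create_2nodes_configuration(
            prev_node_inputs=[("in_1", "integer", 1)],
            prev_node_outputs=[("out_1", "integer", 2)],
            inputs=[("in_1", "integer", 3)],
            outputs=[("out_1", "integer", 4)],
            project_id=faker.uuid4(),
            previous_node_id=faker.uuid4(),
            node_id=faker.uuid4(),
        )
        assert config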


-@pytest.fixture
-def create_pipeline(postgres_db: sa.engine.Engine) -> Iterator[Callable[[str], str]]:
-    created_pipeline_ids: list[str] = []
-
-    def _create(project_id: str) -> str:
-        with postgres_db.connect() as conn:
-            result = conn.execute(
-                comp_pipeline.insert()  # pylint: disable=no-value-for-parameter
-                .values(project_id=project_id)
-                .returning(comp_pipeline.c.project_id)
-            )
-            row = result.first()
-            assert row
-            new_pipeline_id = row[comp_pipeline.c.project_id]
-        created_pipeline_ids.append(f"{new_pipeline_id}")
-        return new_pipeline_id
-
-    yield _create
-
-    # cleanup
-    with postgres_db.connect() as conn:
-        conn.execute(
-            comp_pipeline.delete().where(  # pylint: disable=no-value-for-parameter
-                comp_pipeline.c.project_id.in_(created_pipeline_ids)
-            )
-        )


@pytest.fixture
def create_task(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., str]]:
created_task_ids: list[int] = []
@@ -3,7 +3,7 @@
# pylint:disable=redefined-outer-name

import json
-from collections.abc import Callable
+from collections.abc import Awaitable, Callable
from pathlib import Path

from simcore_sdk.node_ports_common.dbmanager import DBManager
@@ -35,11 +35,11 @@ async def test_db_manager_write_config(
project_id: str,
node_uuid: str,
node_ports_config: None,
-    create_special_configuration: Callable,
+    create_special_configuration: Callable[..., Awaitable[tuple[dict, str, str]]],
default_configuration_file: Path,
):
# create an empty config
-    create_special_configuration()
+    await create_special_configuration()
# read the default config
json_configuration = default_configuration_file.read_text()
# write the default config to the database