diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 0db4b2febd3..d0619a856a2 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -415,7 +415,7 @@ def log_context( if extra: kwargs["extra"] = extra log_msg = f"Starting {msg} ..." - logger.log(level, log_msg, *args, **kwargs) + logger.log(level, log_msg, *args, **kwargs, stacklevel=3) yield duration = ( f" in {(datetime.now() - start ).total_seconds()}s" # noqa: DTZ005 @@ -423,7 +423,7 @@ def log_context( else "" ) log_msg = f"Finished {msg}{duration}" - logger.log(level, log_msg, *args, **kwargs) + logger.log(level, log_msg, *args, **kwargs, stacklevel=3) def guess_message_log_level(message: str) -> LogLevelInt: diff --git a/services/docker-compose.yml b/services/docker-compose.yml index ae2259d3773..9a7271b4836 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -550,27 +550,38 @@ services: networks: - default environment: - LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} + DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} + DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} + + DYNAMIC_SCHEDULER_LOGLEVEL: ${DYNAMIC_SCHEDULER_LOGLEVEL} + DYNAMIC_SCHEDULER_PROFILING: ${DYNAMIC_SCHEDULER_PROFILING} + DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT: ${DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT} + DYNAMIC_SCHEDULER_TRACING: ${DYNAMIC_SCHEDULER_TRACING} + DYNAMIC_SCHEDULER_UI_STORAGE_SECRET: ${DYNAMIC_SCHEDULER_UI_STORAGE_SECRET} + DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER: ${DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER} + DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT: ${DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT} + LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} + + POSTGRES_DB: ${POSTGRES_DB} + POSTGRES_HOST: ${POSTGRES_HOST} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + POSTGRES_PORT: ${POSTGRES_PORT} + POSTGRES_USER: ${POSTGRES_USER} + RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} RABBIT_SECURE: ${RABBIT_SECURE} RABBIT_USER: ${RABBIT_USER} + REDIS_HOST: ${REDIS_HOST} + REDIS_PASSWORD: ${REDIS_PASSWORD} REDIS_PORT: ${REDIS_PORT} REDIS_SECURE: ${REDIS_SECURE} REDIS_USER: ${REDIS_USER} - REDIS_PASSWORD: ${REDIS_PASSWORD} - DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} - DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} - DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER: ${DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER} - DYNAMIC_SCHEDULER_LOGLEVEL: ${DYNAMIC_SCHEDULER_LOGLEVEL} - DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT: ${DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT} - DYNAMIC_SCHEDULER_PROFILING: ${DYNAMIC_SCHEDULER_PROFILING} - DYNAMIC_SCHEDULER_TRACING: ${DYNAMIC_SCHEDULER_TRACING} - DYNAMIC_SCHEDULER_UI_STORAGE_SECRET: ${DYNAMIC_SCHEDULER_UI_STORAGE_SECRET} - DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT: ${DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT} + TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} static-webserver: diff --git a/services/dynamic-scheduler/requirements/_test.in b/services/dynamic-scheduler/requirements/_test.in index 1bc0580e049..c7e7c2c0ad1 100644 --- a/services/dynamic-scheduler/requirements/_test.in +++ b/services/dynamic-scheduler/requirements/_test.in @@ -10,7 +10,7 @@ # --constraint _base.txt - +aiopg[sa] asgi_lifespan coverage docker diff --git 
a/services/dynamic-scheduler/requirements/_test.txt b/services/dynamic-scheduler/requirements/_test.txt index 9be4c5e4f9f..93818a2db05 100644 --- a/services/dynamic-scheduler/requirements/_test.txt +++ b/services/dynamic-scheduler/requirements/_test.txt @@ -1,9 +1,13 @@ +aiopg==1.4.0 + # via -r requirements/_test.in anyio==4.6.2.post1 # via # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in +async-timeout==4.0.3 + # via aiopg certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt @@ -27,6 +31,7 @@ greenlet==3.1.1 # via # -c requirements/_base.txt # playwright + # sqlalchemy h11==0.14.0 # via # -c requirements/_base.txt @@ -73,6 +78,11 @@ pprintpp==0.4.0 # via pytest-icdiff priority==2.0.0 # via hypercorn +psycopg2-binary==2.9.10 + # via + # -c requirements/_base.txt + # aiopg + # sqlalchemy pyee==12.0.0 # via playwright pytest==8.3.4 @@ -121,6 +131,11 @@ sniffio==1.3.1 # anyio # asgi-lifespan # httpx +sqlalchemy==1.4.54 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # aiopg termcolor==2.5.0 # via pytest-sugar typing-extensions==4.12.2 diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py index 0b7d56fccda..b6322a3e0ab 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py @@ -2,6 +2,7 @@ import os import typer +from settings_library.postgres import PostgresSettings from settings_library.rabbit import RabbitSettings from settings_library.utils_cli import ( create_settings_command, @@ -56,6 +57,21 @@ def echo_dotenv(ctx: typer.Context, *, minimal: bool = True): "DYNAMIC_SCHEDULER_UI_STORAGE_SECRET", "replace-with-ui-storage-secret", ), + DYNAMIC_SCHEDULER_POSTGRES=os.environ.get( + "DYNAMIC_SCHEDULER_POSTGRES", + PostgresSettings.create_from_envs( + POSTGRES_HOST=os.environ.get( + "POSTGRES_HOST", "replace-with-postgres-host" + ), + POSTGRES_USER=os.environ.get( + "POSTGRES_USER", "replace-with-postgres-user" + ), + POSTGRES_PASSWORD=os.environ.get( + "POSTGRES_PASSWORD", "replace-with-postgres-password" + ), + POSTGRES_DB=os.environ.get("POSTGRES_DB", "replace-with-postgres-db"), + ), + ), ) print_as_envfile( diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py index f2bde3bfd88..8b4d2be0a18 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py @@ -27,6 +27,7 @@ from ..services.director_v0 import lifespan_director_v0 from ..services.director_v2 import lifespan_director_v2 from ..services.notifier import get_notifier_lifespans +from ..services.postgres import lifespan_postgres from ..services.rabbitmq import lifespan_rabbitmq from ..services.redis import lifespan_redis from ..services.service_tracker import lifespan_service_tracker @@ -50,6 +51,7 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: lifespan_rabbitmq, lifespan_rpc_api_routes, lifespan_redis, + lifespan_postgres, *get_notifier_lifespans(), lifespan_service_tracker, lifespan_deferred_manager, diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py 
b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py index 9531641897f..f31e29c8adb 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py @@ -8,6 +8,7 @@ from settings_library.director_v0 import DirectorV0Settings from settings_library.director_v2 import DirectorV2Settings from settings_library.http_client_request import ClientRequestSettings +from settings_library.postgres import PostgresSettings from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings from settings_library.tracing import TracingSettings @@ -144,6 +145,14 @@ class ApplicationSettings(_BaseApplicationSettings): description="settings for director-v2 service", ) + DYNAMIC_SCHEDULER_POSTGRES: Annotated[ + PostgresSettings, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for postgres service", + ), + ] + DYNAMIC_SCHEDULER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True DYNAMIC_SCHEDULER_PROFILING: bool = False diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/postgres/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/postgres/__init__.py new file mode 100644 index 00000000000..240e9a7f991 --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/postgres/__init__.py @@ -0,0 +1,8 @@ +from ._project_networks import ProjectNetworkNotFoundError, ProjectNetworksRepo +from ._setup import lifespan_postgres + +__all__: tuple[str, ...] = ( + "lifespan_postgres", + "ProjectNetworkNotFoundError", + "ProjectNetworksRepo", +) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/postgres/_project_networks.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/postgres/_project_networks.py new file mode 100644 index 00000000000..a4ef48ce60b --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/postgres/_project_networks.py @@ -0,0 +1,57 @@ +import sqlalchemy as sa +from common_library.errors_classes import OsparcErrorMixin +from models_library.projects import ProjectID +from models_library.projects_networks import NetworksWithAliases, ProjectsNetworks +from simcore_postgres_database.models.projects_networks import projects_networks +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) +from sqlalchemy.dialects.postgresql import insert as pg_insert +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine + + +class BaseProjectNetworksError(OsparcErrorMixin, RuntimeError): + msg_template: str = "project networks unexpected error" + + +class ProjectNetworkNotFoundError(BaseProjectNetworksError): + msg_template: str = "no networks found for project {project_id}" + + +class ProjectNetworksRepo: + def __init__(self, engine: AsyncEngine): + self.engine = engine + + async def get_projects_networks( + self, connection: AsyncConnection | None = None, *, project_id: ProjectID + ) -> ProjectsNetworks: + async with pass_or_acquire_connection(self.engine, connection) as conn: + result = await conn.execute( + sa.select(projects_networks).where( + projects_networks.c.project_uuid == f"{project_id}" + ) + ) + row = result.first() + if not row: + raise ProjectNetworkNotFoundError(project_id=project_id) + return ProjectsNetworks.model_validate(row) +
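+    # NOTE: the upsert below uses PostgreSQL's `INSERT ... ON CONFLICT DO UPDATE`
+    # keyed on `project_uuid`: the first call creates the row and any later call
+    # overwrites it with the new `networks_with_aliases` for the same project.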
async def upsert_projects_networks( + self, + connection: AsyncConnection | None = None, + *, + project_id: ProjectID, + networks_with_aliases: NetworksWithAliases, + ) -> None: + projects_networks_to_insert = ProjectsNetworks.model_validate( + {"project_uuid": project_id, "networks_with_aliases": networks_with_aliases} + ) + + async with transaction_context(self.engine, connection) as conn: + row_data = projects_networks_to_insert.model_dump(mode="json") + insert_stmt = pg_insert(projects_networks).values(**row_data) + upsert_snapshot = insert_stmt.on_conflict_do_update( + index_elements=[projects_networks.c.project_uuid], set_=row_data + ) + await conn.execute(upsert_snapshot) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/postgres/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/postgres/_setup.py new file mode 100644 index 00000000000..07c74e4c7da --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/postgres/_setup.py @@ -0,0 +1,21 @@ +from collections.abc import AsyncIterator + +from fastapi import FastAPI +from fastapi_lifespan_manager import State +from servicelib.db_async_engine import close_db_connection, connect_to_db +from sqlalchemy.ext.asyncio import AsyncEngine + +from ...core.settings import ApplicationSettings + + +async def lifespan_postgres(app: FastAPI) -> AsyncIterator[State]: + settings: ApplicationSettings = app.state.settings + + await connect_to_db(app, settings.DYNAMIC_SCHEDULER_POSTGRES) + assert app.state.engine # nosec + assert isinstance(app.state.engine, AsyncEngine) # nosec + + yield {} + + assert app.state.engine # nosec + await close_db_connection(app) diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py index 3e39bd327c9..cb82e10c4f8 100644 --- a/services/dynamic-scheduler/tests/conftest.py +++ b/services/dynamic-scheduler/tests/conftest.py @@ -25,6 +25,8 @@ "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", "pytest_simcore.faker_projects_data", + "pytest_simcore.faker_users_data", + "pytest_simcore.postgres_service", "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", "pytest_simcore.repository_paths", @@ -115,6 +117,11 @@ def disable_status_monitor_lifespan(mocker: MockerFixture) -> None: mocker.patch(f"{_PATH_APPLICATION}.lifespan_status_monitor") +@pytest.fixture +def disable_postgres_lifespan(mocker: MockerFixture) -> None: + mocker.patch(f"{_PATH_APPLICATION}.lifespan_postgres") + + MAX_TIME_FOR_APP_TO_STARTUP: Final[float] = 10 MAX_TIME_FOR_APP_TO_SHUTDOWN: Final[float] = 10 diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py b/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py index be92830ee54..4d36ae33da8 100644 --- a/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py +++ b/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py @@ -9,12 +9,15 @@ from unittest.mock import AsyncMock import pytest +import sqlalchemy as sa from fastapi import FastAPI, status from httpx import AsyncClient from hypercorn.asyncio import serve from hypercorn.config import Config from playwright.async_api import Page, async_playwright from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.postgres_tools import PostgresTestConfig from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.rabbit import RabbitSettings 
from settings_library.redis import RedisSettings @@ -54,11 +57,20 @@ def mock_remove_tracked_service(mocker: MockerFixture) -> AsyncMock: @pytest.fixture def app_environment( app_environment: EnvVarsDict, + postgres_db: sa.engine.Engine, + postgres_host_config: PostgresTestConfig, disable_status_monitor_background_task: None, rabbit_service: RabbitSettings, redis_service: RedisSettings, remove_redis_data: None, + monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: + setenvs_from_dict( + monkeypatch, + { + "POSTGRES_CLIENT_NAME": "test_postgres_client", + }, + ) return app_environment diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_index.py b/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_index.py index 73bf844271e..8ba68fbe632 100644 --- a/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_index.py +++ b/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_index.py @@ -32,6 +32,7 @@ from tenacity import AsyncRetrying, stop_after_delay, wait_fixed pytest_simcore_core_services_selection = [ + "postgres", "rabbit", "redis", ] diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_service.py b/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_service.py index edcccb2cab6..a4f0c3993d0 100644 --- a/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_service.py +++ b/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_service.py @@ -28,6 +28,7 @@ from tenacity import AsyncRetrying, stop_after_delay, wait_fixed pytest_simcore_core_services_selection = [ + "postgres", "rabbit", "redis", ] diff --git a/services/dynamic-scheduler/tests/unit/api_rest/conftest.py b/services/dynamic-scheduler/tests/unit/api_rest/conftest.py index d7fbda477ff..eafc8a694e9 100644 --- a/services/dynamic-scheduler/tests/unit/api_rest/conftest.py +++ b/services/dynamic-scheduler/tests/unit/api_rest/conftest.py @@ -11,6 +11,7 @@ @pytest.fixture def app_environment( + disable_postgres_lifespan: None, disable_rabbitmq_lifespan: None, disable_redis_lifespan: None, disable_service_tracker_lifespan: None, diff --git a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py index f3380bbb2f5..c9b974e4454 100644 --- a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py +++ b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py @@ -179,6 +179,7 @@ def app_environment( @pytest.fixture async def rpc_client( + disable_postgres_lifespan: None, app_environment: EnvVarsDict, mock_director_v2_service_state: None, mock_director_v0_service_state: None, diff --git a/services/dynamic-scheduler/tests/unit/services/postgres/test__project_networks.py b/services/dynamic-scheduler/tests/unit/services/postgres/test__project_networks.py new file mode 100644 index 00000000000..e5316c200e9 --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/services/postgres/test__project_networks.py @@ -0,0 +1,146 @@ +# pylint:disable=contextmanager-generator-missing-cleanup +# pylint:disable=redefined-outer-name +# pylint:disable=unused-argument + +from collections.abc import AsyncIterator +from typing import Any + +import pytest +import sqlalchemy as sa +from fastapi import FastAPI +from models_library.projects import ProjectID +from models_library.projects_networks import NetworksWithAliases +from 
models_library.users import UserID +from pydantic import TypeAdapter +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.postgres_tools import ( + PostgresTestConfig, + insert_and_get_row_lifespan, +) +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_postgres_database.models.projects import projects +from simcore_postgres_database.models.users import users +from simcore_service_dynamic_scheduler.services.postgres import ( + ProjectNetworkNotFoundError, + ProjectNetworksRepo, +) +from sqlalchemy.ext.asyncio import AsyncEngine + +pytest_simcore_core_services_selection = [ + "postgres", +] +pytest_simcore_ops_services_selection = [ + "adminer", +] + + +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, + postgres_db: sa.engine.Engine, + postgres_host_config: PostgresTestConfig, + disable_rabbitmq_lifespan: None, + disable_redis_lifespan: None, + disable_service_tracker_lifespan: None, + disable_deferred_manager_lifespan: None, + disable_notifier_lifespan: None, + disable_status_monitor_lifespan: None, + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + setenvs_from_dict( + monkeypatch, + { + "POSTGRES_CLIENT_NAME": "test_postgres_client", + }, + ) + return app_environment + + +@pytest.fixture +def engine(app: FastAPI) -> AsyncEngine: + assert isinstance(app.state.engine, AsyncEngine) + return app.state.engine + + +@pytest.fixture +def user_id() -> UserID: + return 1 + + +@pytest.fixture +async def user_in_db( + engine: AsyncEngine, + user: dict[str, Any], + user_id: UserID, +) -> AsyncIterator[dict[str, Any]]: + """ + injects a user in db + """ + assert user_id == user["id"] + async with insert_and_get_row_lifespan( + engine, + table=users, + values=user, + pk_col=users.c.id, + pk_value=user["id"], + ) as row: + yield row + + +@pytest.fixture +async def project_in_db( + engine: AsyncEngine, + project_id: ProjectID, + project_data: dict[str, Any], + user_in_db: dict[str, Any], +) -> AsyncIterator[dict[str, Any]]: + assert f"{project_id}" == project_data["uuid"] + async with insert_and_get_row_lifespan( + engine, + table=projects, + values=project_data, + pk_col=projects.c.uuid, + pk_value=project_data["uuid"], + ) as row: + yield row + + +@pytest.fixture() +def project_networks_repo(engine: AsyncEngine) -> ProjectNetworksRepo: + return ProjectNetworksRepo(engine) + + +@pytest.fixture +def networks_with_aliases() -> NetworksWithAliases: + return TypeAdapter(NetworksWithAliases).validate_python( + NetworksWithAliases.model_json_schema()["examples"][0] + ) + + +async def test_no_project_networks_for_project( + project_networks_repo: ProjectNetworksRepo, + project_in_db: dict[str, Any], + project_id: ProjectID, +): + with pytest.raises(ProjectNetworkNotFoundError): + await project_networks_repo.get_projects_networks(project_id=project_id) + + +async def test_upsert_projects_networks( + project_networks_repo: ProjectNetworksRepo, + project_in_db: dict[str, Any], + project_id: ProjectID, + networks_with_aliases: NetworksWithAliases, +): + + # run twice to test the upsert capabilities + for _ in range(2): + await project_networks_repo.upsert_projects_networks( + project_id=project_id, networks_with_aliases=networks_with_aliases + ) + + project_networks = await project_networks_repo.get_projects_networks( + project_id=project_id + ) + assert project_networks.project_uuid == project_id + assert project_networks.networks_with_aliases == networks_with_aliases diff --git 
a/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py b/services/dynamic-scheduler/tests/unit/services/service_tracker/test__api.py similarity index 99% rename from services/dynamic-scheduler/tests/unit/service_tracker/test__api.py rename to services/dynamic-scheduler/tests/unit/services/service_tracker/test__api.py index b8d385089f3..5ce6c8c3d1c 100644 --- a/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py +++ b/services/dynamic-scheduler/tests/unit/services/service_tracker/test__api.py @@ -52,6 +52,7 @@ @pytest.fixture def app_environment( + disable_postgres_lifespan: None, disable_rabbitmq_lifespan: None, disable_deferred_manager_lifespan: None, disable_notifier_lifespan: None, diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__models.py b/services/dynamic-scheduler/tests/unit/services/service_tracker/test__models.py similarity index 100% rename from services/dynamic-scheduler/tests/unit/service_tracker/test__models.py rename to services/dynamic-scheduler/tests/unit/services/service_tracker/test__models.py diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py b/services/dynamic-scheduler/tests/unit/services/service_tracker/test__tracker.py similarity index 98% rename from services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py rename to services/dynamic-scheduler/tests/unit/services/service_tracker/test__tracker.py index 8ad52fd1f9c..818a724c77d 100644 --- a/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py +++ b/services/dynamic-scheduler/tests/unit/services/service_tracker/test__tracker.py @@ -35,6 +35,7 @@ def disable_monitor_task(mocker: MockerFixture) -> None: @pytest.fixture def app_environment( + disable_postgres_lifespan: None, disable_monitor_task: None, disable_rabbitmq_lifespan: None, disable_deferred_manager_lifespan: None, diff --git a/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py b/services/dynamic-scheduler/tests/unit/services/status_monitor/test__monitor.py similarity index 99% rename from services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py rename to services/dynamic-scheduler/tests/unit/services/status_monitor/test__monitor.py index f0bc878fcd9..4b59a9683ab 100644 --- a/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py +++ b/services/dynamic-scheduler/tests/unit/services/status_monitor/test__monitor.py @@ -61,6 +61,7 @@ @pytest.fixture def app_environment( + disable_postgres_lifespan: None, app_environment: EnvVarsDict, rabbit_service: RabbitSettings, redis_service: RedisSettings, diff --git a/services/dynamic-scheduler/tests/unit/test_cli.py b/services/dynamic-scheduler/tests/unit/test_cli.py index 6bdaa62d1e6..f2378e761a2 100644 --- a/services/dynamic-scheduler/tests/unit/test_cli.py +++ b/services/dynamic-scheduler/tests/unit/test_cli.py @@ -1,3 +1,4 @@ +# pylint:disable=redefined-outer-name # pylint:disable=unused-argument import os @@ -29,8 +30,11 @@ def test_cli_help_and_version(cli_runner: CliRunner): assert result.stdout.strip() == API_VERSION -def test_echo_dotenv(cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch): +def test_echo_dotenv( + app_environment: EnvVarsDict, cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch +): # simcore-service-dynamic-scheduler echo-dotenv + ApplicationSettings.create_from_envs() result = cli_runner.invoke(cli_main, "echo-dotenv") assert result.exit_code == os.EX_OK, 
_format_cli_error(result) @@ -41,25 +45,10 @@ def test_echo_dotenv(cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch): ApplicationSettings.create_from_envs() -def test_list_settings( - cli_runner: CliRunner, app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch -): - with monkeypatch.context() as patch: - setenvs_from_dict( - patch, - { - **app_environment, - "DYNAMIC_SCHEDULER_TRACING": "{}", - "TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT": "http://replace-with-opentelemetry-collector", - "TRACING_OPENTELEMETRY_COLLECTOR_PORT": "4318", - }, - ) - - # simcore-service-dynamic-scheduler settings --show-secrets --as-json - result = cli_runner.invoke( - cli_main, ["settings", "--show-secrets", "--as-json"] - ) - assert result.exit_code == os.EX_OK, _format_cli_error(result) +def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): + # simcore-service-dynamic-scheduler settings --show-secrets --as-json + result = cli_runner.invoke(cli_main, ["settings", "--show-secrets", "--as-json"]) + assert result.exit_code == os.EX_OK, _format_cli_error(result) print(result.output) settings = ApplicationSettings(result.output) diff --git a/services/dynamic-scheduler/tests/unit/test_services_director_v0.py b/services/dynamic-scheduler/tests/unit/test_services_director_v0.py index 9ad42924d54..204d66e0d34 100644 --- a/services/dynamic-scheduler/tests/unit/test_services_director_v0.py +++ b/services/dynamic-scheduler/tests/unit/test_services_director_v0.py @@ -21,6 +21,7 @@ @pytest.fixture def app_environment( + disable_postgres_lifespan: None, disable_redis_lifespan: None, disable_rabbitmq_lifespan: None, disable_service_tracker_lifespan: None, diff --git a/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py b/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py index 12c355162c0..bdc5fe73fa3 100644 --- a/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py +++ b/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py @@ -20,6 +20,7 @@ @pytest.fixture def app_environment( + disable_postgres_lifespan: None, disable_redis_lifespan: None, disable_service_tracker_lifespan: None, disable_deferred_manager_lifespan: None, diff --git a/services/dynamic-scheduler/tests/unit/test_services_redis.py b/services/dynamic-scheduler/tests/unit/test_services_redis.py index be4952fbea6..54a8ad29cc7 100644 --- a/services/dynamic-scheduler/tests/unit/test_services_redis.py +++ b/services/dynamic-scheduler/tests/unit/test_services_redis.py @@ -15,6 +15,7 @@ @pytest.fixture def app_environment( + disable_postgres_lifespan: None, disable_rabbitmq_lifespan: None, disable_deferred_manager_lifespan: None, disable_notifier_lifespan: None,