diff --git a/.ruff.toml b/.ruff.toml
index cba34d54b136..5100a4d23d2c 100644
--- a/.ruff.toml
+++ b/.ruff.toml
@@ -1,5 +1,5 @@
-select = [
+lint.select = [
     "A", # [https://pypi.org/project/flake8-builtins/]
     "ARG", # [https://pypi.org/project/flake8-unused-arguments/]
     "ASYNC", # [https://pypi.org/project/flake8-async/]
@@ -40,7 +40,7 @@ select = [
     "W", # [https://pypi.org/project/pycodestyle/] warnings
     "YTT", # [https://pypi.org/project/flake8-2020/]
 ]
-ignore = [
+lint.ignore = [
     "E501", # line too long, handled by black
     "S101", # use of `assert` detected hanbled by pylance, does not support noseq
     "TID252", # [*] Relative imports from parent modules are banned
@@ -50,7 +50,7 @@ ignore = [
 
 target-version = "py311"
 
-[per-file-ignores]
+[lint.per-file-ignores]
 "**/{tests,pytest_simcore}/**" = [
     "T201", # print found
     "ARG001", # unused function argument
@@ -64,10 +64,10 @@ target-version = "py311"
     "FBT001", # Boolean positional arg in function definition
 ]
 
-[flake8-pytest-style]
+[lint.flake8-pytest-style]
 fixture-parentheses = false
 parametrize-names-type = "csv"
 
-[pylint]
+[lint.pylint]
 max-args = 10
diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py
index f9125cc2fcee..f89b9cbcc8bd 100644
--- a/packages/postgres-database/tests/conftest.py
+++ b/packages/postgres-database/tests/conftest.py
@@ -5,7 +5,7 @@
 import uuid
 import warnings
-from collections.abc import AsyncIterator, Awaitable, Callable, Iterator
+from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Iterator
 from pathlib import Path
 
 import aiopg.sa
@@ -13,11 +13,13 @@
 import pytest
 import simcore_postgres_database.cli
 import sqlalchemy as sa
+import sqlalchemy.engine
 import yaml
 from aiopg.sa.connection import SAConnection
 from aiopg.sa.engine import Engine
 from aiopg.sa.result import ResultProxy, RowProxy
 from faker import Faker
+from pytest_simcore.helpers import postgres_tools
 from pytest_simcore.helpers.faker_factories import (
     random_group,
     random_project,
@@ -71,20 +73,15 @@ def postgres_service(docker_services, docker_ip, docker_compose_file) -> str:
     return dsn
 
 
-@pytest.fixture
-def make_engine(
-    postgres_service: str,
-) -> Callable[[bool], Awaitable[Engine] | sa.engine.base.Engine]:
-    dsn = postgres_service
-
-    def _make(is_async=True) -> Awaitable[Engine] | sa.engine.base.Engine:
-        return aiopg.sa.create_engine(dsn) if is_async else sa.create_engine(dsn)
-
-    return _make
+@pytest.fixture(scope="session")
+def sync_engine(postgres_service: str) -> Iterable[sqlalchemy.engine.Engine]:
+    _engine: sqlalchemy.engine.Engine = sa.create_engine(url=postgres_service)
+    yield _engine
+    _engine.dispose()
 
 
 @pytest.fixture
-def make_asyncpg_engine(postgres_service: str) -> Callable[[bool], AsyncEngine]:
+def _make_asyncpg_engine(postgres_service: str) -> Callable[[bool], AsyncEngine]:
     # NOTE: users is responsible of `await engine.dispose()`
     dsn = postgres_service.replace("postgresql://", "postgresql+asyncpg://")
     minsize = 1
@@ -127,10 +124,10 @@ def db_metadata() -> sa.MetaData:
 
 @pytest.fixture(params=["sqlModels", "alembicMigration"])
 def pg_sa_engine(
-    make_engine: Callable,
+    sync_engine: sqlalchemy.engine.Engine,
     db_metadata: sa.MetaData,
     request: pytest.FixtureRequest,
-) -> Iterator[sa.engine.Engine]:
+) -> Iterator[sqlalchemy.engine.Engine]:
     """
     Runs migration to create tables and return a sqlalchemy engine
 
@@ -144,7 +141,6 @@ def pg_sa_engine(
     # the tables, i.e. when no migration mechanism are in place
     # Best is therefore to start from scratch and delete all at
     # the end
-    sync_engine = make_engine(is_async=False)
 
     # NOTE: ALL is deleted before
     db_metadata.drop_all(sync_engine)
@@ -165,22 +161,20 @@ def pg_sa_engine(
 
     yield sync_engine
 
-    # NOTE: ALL is deleted after
-    with sync_engine.begin() as conn:
-        conn.execute(sa.DDL("DROP TABLE IF EXISTS alembic_version"))
-        db_metadata.drop_all(sync_engine)
-    sync_engine.dispose()
+    postgres_tools.force_drop_all_tables(sync_engine)
 
 
 @pytest.fixture
 async def aiopg_engine(
-    pg_sa_engine: sa.engine.Engine, make_engine: Callable
+    pg_sa_engine: sqlalchemy.engine.Engine,
+    postgres_service: str,
 ) -> AsyncIterator[Engine]:
     """
     Return an aiopg.sa engine connected to a responsive and migrated pg database
     """
-
-    aiopg_sa_engine = await make_engine(is_async=True)
+    # NOTE: `pg_sa_engine` runs first; check that it points to the same database as this fixture
+    assert pg_sa_engine.url.database
+    assert postgres_service.endswith(pg_sa_engine.url.database)
 
     warnings.warn(
         "The 'aiopg_engine' is deprecated since we are replacing `aiopg` library by `sqlalchemy.ext.asyncio`."
@@ -190,12 +184,8 @@ async def aiopg_engine(
         stacklevel=2,
     )
 
-    yield aiopg_sa_engine
-
-    # closes async-engine connections and terminates
-    aiopg_sa_engine.close()
-    await aiopg_sa_engine.wait_closed()
-    aiopg_sa_engine.terminate()
+    async with aiopg.sa.create_engine(dsn=postgres_service) as aiopg_sa_engine:
+        yield aiopg_sa_engine
 
 
 @pytest.fixture
@@ -208,15 +198,15 @@ async def connection(aiopg_engine: Engine) -> AsyncIterator[SAConnection]:
 @pytest.fixture
 async def asyncpg_engine(  # <-- WE SHOULD USE THIS ONE
     is_pdb_enabled: bool,
-    pg_sa_engine: sa.engine.Engine,
-    make_asyncpg_engine: Callable[[bool], AsyncEngine],
+    pg_sa_engine: sqlalchemy.engine.Engine,
+    _make_asyncpg_engine: Callable[[bool], AsyncEngine],
 ) -> AsyncIterator[AsyncEngine]:
 
     assert (
         pg_sa_engine
     ), "Ensures pg db up, responsive, init (w/ tables) and/or migrated"
 
-    _apg_engine = make_asyncpg_engine(is_pdb_enabled)
+    _apg_engine = _make_asyncpg_engine(is_pdb_enabled)
 
     yield _apg_engine
 
@@ -229,9 +219,7 @@ async def asyncpg_engine(  # <-- WE SHOULD USE THIS ONE
 
 
 @pytest.fixture
-def create_fake_group(
-    make_engine: Callable[..., Awaitable[Engine] | sa.engine.base.Engine]
-) -> Iterator[Callable]:
+def create_fake_group(sync_engine: sqlalchemy.engine.Engine) -> Iterator[Callable]:
     """factory to create standard group"""
     created_ids = []
 
@@ -250,16 +238,13 @@ async def _creator(conn: SAConnection, **overrides) -> RowProxy:
 
     yield _creator
 
-    sync_engine = make_engine(is_async=False)
-    assert isinstance(sync_engine, sa.engine.Engine)
+    assert isinstance(sync_engine, sqlalchemy.engine.Engine)
     with sync_engine.begin() as conn:
         conn.execute(sa.delete(groups).where(groups.c.gid.in_(created_ids)))
 
 
 @pytest.fixture
-def create_fake_user(
-    make_engine: Callable[..., Awaitable[Engine] | sa.engine.base.Engine]
-) -> Iterator[Callable]:
+def create_fake_user(sync_engine: sqlalchemy.engine.Engine) -> Iterator[Callable]:
     """factory to create a user w/ or w/o a standard group"""
     created_ids = []
 
@@ -290,8 +275,7 @@ async def _creator(conn, group: RowProxy | None = None, **overrides) -> RowProxy
 
     yield _creator
 
-    sync_engine = make_engine(is_async=False)
-    assert isinstance(sync_engine, sa.engine.Engine)
+    assert isinstance(sync_engine, sqlalchemy.engine.Engine)
     with sync_engine.begin() as conn:
         conn.execute(users.delete().where(users.c.id.in_(created_ids)))
diff --git a/packages/postgres-database/tests/test_models_groups.py b/packages/postgres-database/tests/test_models_groups.py
index 649e21118677..6ce8a77c4cc3 100644
--- a/packages/postgres-database/tests/test_models_groups.py
+++ b/packages/postgres-database/tests/test_models_groups.py
@@ -3,18 +3,14 @@
 # pylint: disable=unused-variable
 # pylint: disable=too-many-arguments
 
-
-from collections.abc import AsyncIterator, Awaitable, Callable
+from collections.abc import Callable
 
 import aiopg.sa.exc
 import pytest
-import sqlalchemy as sa
 from aiopg.sa.connection import SAConnection
-from aiopg.sa.engine import Engine
 from aiopg.sa.result import ResultProxy, RowProxy
 from psycopg2.errors import ForeignKeyViolation, RaiseException, UniqueViolation
 from pytest_simcore.helpers.faker_factories import random_user
-from simcore_postgres_database.models.base import metadata
 from simcore_postgres_database.webserver_models import (
     GroupType,
     groups,
@@ -24,21 +20,6 @@
 from sqlalchemy import func, literal_column, select
 
 
-@pytest.fixture
-async def connection(
-    make_engine: Callable[[bool], Awaitable[Engine] | sa.engine.base.Engine]
-) -> AsyncIterator[SAConnection]:
-    engine = await make_engine()
-    sync_engine = make_engine(is_async=False)
-    metadata.drop_all(sync_engine)
-    metadata.create_all(sync_engine)
-
-    async with engine.acquire() as conn:
-        yield conn
-
-    metadata.drop_all(sync_engine)
-
-
 async def test_user_group_uniqueness(
     connection: SAConnection,
     create_fake_group: Callable,
diff --git a/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py b/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py
index 80b9b28e5145..2935a0de45ad 100644
--- a/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py
+++ b/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py
@@ -1,13 +1,18 @@
-# pylint:disable=no-value-for-parameter
-# pylint:disable=unused-variable
-# pylint:disable=unused-argument
-# pylint:disable=redefined-outer-name
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
 
 import json
+from collections.abc import AsyncIterator
 
+import aiopg.sa.engine
+import aiopg.sa.exc
 import pytest
 import sqlalchemy as sa
+import sqlalchemy.engine
 from psycopg2.errors import UniqueViolation  # pylint: disable=no-name-in-module
+from pytest_simcore.helpers import postgres_tools
 from pytest_simcore.helpers.faker_factories import fake_pipeline, fake_task_factory
 from simcore_postgres_database.models.base import metadata
 from simcore_postgres_database.webserver_models import comp_pipeline, comp_tasks
@@ -16,13 +21,15 @@
 
 
 @pytest.fixture
-async def engine(make_engine):
-    engine = await make_engine()
-    sync_engine = make_engine(is_async=False)
+async def engine(
+    sync_engine: sqlalchemy.engine.Engine,
+    aiopg_engine: aiopg.sa.engine.Engine,
+) -> AsyncIterator[aiopg.sa.engine.Engine]:
+
     metadata.drop_all(sync_engine)
     metadata.create_all(sync_engine)
 
-    async with engine.acquire() as conn:
+    async with aiopg_engine.acquire() as conn:
         await conn.execute(
             comp_pipeline.insert().values(**fake_pipeline(project_id="PA"))
         )
@@ -30,10 +37,9 @@ async def engine(make_engine):
             comp_pipeline.insert().values(**fake_pipeline(project_id="PB"))
         )
 
-    yield engine
+    yield aiopg_engine
 
-    engine.close()
-    await engine.wait_closed()
+    postgres_tools.force_drop_all_tables(sync_engine)
 
 
 async def test_unique_project_node_pairs(engine):
diff --git a/packages/postgres-database/tests/test_utils_migration.py b/packages/postgres-database/tests/test_utils_migration.py
index 2fc6fe443ca4..d75006badd64 100644
--- a/packages/postgres-database/tests/test_utils_migration.py
+++ b/packages/postgres-database/tests/test_utils_migration.py
@@ -3,10 +3,10 @@
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
 
-from collections.abc import Callable
 
 import pytest
 import simcore_postgres_database.cli
+import sqlalchemy.engine
 from alembic.script.revision import MultipleHeads
 from simcore_postgres_database.utils_migration import get_current_head
 from sqlalchemy import inspect
@@ -23,8 +23,8 @@ def test_migration_has_no_branches():
         )
 
 
-def test_migration_upgrade_downgrade(make_engine: Callable):
-    sync_engine = make_engine(is_async=False)
+def test_migration_upgrade_downgrade(sync_engine: sqlalchemy.engine.Engine):
+    assert sync_engine
 
     assert simcore_postgres_database.cli.discover.callback
     assert simcore_postgres_database.cli.upgrade.callback
diff --git a/packages/pytest-simcore/src/pytest_simcore/docker.py b/packages/pytest-simcore/src/pytest_simcore/docker.py
index 89e88484a4b2..9ca883ec6c87 100644
--- a/packages/pytest-simcore/src/pytest_simcore/docker.py
+++ b/packages/pytest-simcore/src/pytest_simcore/docker.py
@@ -20,12 +20,16 @@ async def async_docker_client() -> AsyncIterator[aiodocker.Docker]:
 
 
 @contextlib.asynccontextmanager
-async def _pause_container(
+async def _pause_docker_container(
    async_docker_client: aiodocker.Docker, container_name: str
 ) -> AsyncIterator[None]:
     containers = await async_docker_client.containers.list(
         filters={"name": [f"{container_name}."]}
     )
+    assert (
+        containers
+    ), f"Failed to pause container {container_name=} because it was not found"
+
     await asyncio.gather(*(c.pause() for c in containers))
     # refresh
     container_attrs = await asyncio.gather(*(c.show() for c in containers))
@@ -46,7 +50,7 @@ async def _pause_container(
 async def paused_container() -> Callable[[str], AbstractAsyncContextManager[None]]:
     @contextlib.asynccontextmanager
     async def _(container_name: str) -> AsyncIterator[None]:
-        async with aiodocker.Docker() as docker_client, _pause_container(
+        async with aiodocker.Docker() as docker_client, _pause_docker_container(
             docker_client, container_name
         ):
             yield None
diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py
index 3069f41b4f16..2085e62a3652 100644
--- a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py
+++ b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py
@@ -335,7 +335,9 @@ def _save_docker_logs_to_folder(failed_test_directory: Path):
 
 
 @pytest.hookimpl()
-def pytest_exception_interact(node, call, report):
+def pytest_exception_interact(
+    node, call: pytest.CallInfo[Any], report: pytest.CollectReport
+):
     # get the node root dir (guaranteed to exist)
     root_directory: Path = Path(node.config.rootdir)
     failed_test_directory = root_directory / "test_failures" / node.name
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py b/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py
index 5f1f6c6ffe44..1621713acb36 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py
@@ -17,6 +17,22 @@ class PostgresTestConfig(TypedDict):
     port: str
 
 
+def force_drop_all_tables(sa_sync_engine: sa.engine.Engine) -> None:
+    with sa_sync_engine.begin() as conn:
+        conn.execute(sa.DDL("DROP TABLE IF EXISTS alembic_version"))
+        conn.execute(
+            # NOTE: terminates all open transactions before dropping all tables
+            # This solves https://github.com/ITISFoundation/osparc-simcore/issues/7008
+            sa.DDL(
+                "SELECT pg_terminate_backend(pid) "
+                "FROM pg_stat_activity "
+                "WHERE state = 'idle in transaction';"
+            )
+        )
+        # SEE https://github.com/ITISFoundation/osparc-simcore/issues/1776
+        metadata.drop_all(bind=conn)
+
+
 @contextmanager
 def migrated_pg_tables_context(
     postgres_config: PostgresTestConfig,
@@ -46,22 +62,15 @@ def migrated_pg_tables_context(
     # assert simcore_postgres_database.cli.downgrade.callback
     assert simcore_postgres_database.cli.clean.callback
+    simcore_postgres_database.cli.downgrade.callback("base")
     simcore_postgres_database.cli.clean.callback()  # just cleans discover cache
 
-    postgres_engine = sa.create_engine(dsn)
-    with postgres_engine.begin() as conn:
-        conn.execute(
-            # NOTE: terminates all open transactions before droping all tables
-            # This solves https://github.com/ITISFoundation/osparc-simcore/issues/7008
-            sa.DDL(
-                "SELECT pg_terminate_backend(pid) "
-                "FROM pg_stat_activity "
-                "WHERE state = 'idle in transaction';"
-            )
-        )
-        # SEE https://github.com/ITISFoundation/osparc-simcore/issues/1776
-        metadata.drop_all(bind=conn)
+    sync_engine = sa.create_engine(dsn)
+    try:
+        force_drop_all_tables(sync_engine)
+    finally:
+        sync_engine.dispose()
 
 
 def is_postgres_responsive(url) -> bool:
diff --git a/packages/service-library/Makefile b/packages/service-library/Makefile
index 16d494eafebc..edefcc08385c 100644
--- a/packages/service-library/Makefile
+++ b/packages/service-library/Makefile
@@ -107,7 +107,7 @@ test-dev[all]: ## runs unit tests w/ all extras
 
 
 .PHONY: test-ci[all]
-test-ci[all]: ## runs unit tests w/ all extras
+test-ci[all]: ## runs unit tests w/ all extras
	@pytest \
		--asyncio-mode=auto \
		--color=yes \
@@ -115,12 +115,12 @@ test-ci[all]: ## runs unit tests w/ all extras
		--cov-config=$(CURDIR)/../../.coveragerc \
		--cov-report=term-missing \
		--cov-report=xml \
-		--junitxml=junit.xml -o junit_family=legacy \
		--cov=$(APP_PACKAGE_NAME) \
		--durations=10 \
+		--junitxml=junit.xml -o junit_family=legacy \
		--keep-docker-up \
		--log-date-format="%Y-%m-%d %H:%M:%S" \
-		--log-format="%(asctime)s %(levelname)s %(message)s" \
+		--log-format="%(asctime)s %(levelname)s %(message)s" \
		--verbose \
		-m "not heavy_load" \
		$(CURDIR)/tests
diff --git a/packages/service-library/src/servicelib/redis/_client.py b/packages/service-library/src/servicelib/redis/_client.py
index 38a1a5f6f319..c2a081541104 100644
--- a/packages/service-library/src/servicelib/redis/_client.py
+++ b/packages/service-library/src/servicelib/redis/_client.py
@@ -3,6 +3,7 @@
 import logging
 from asyncio import Task
 from dataclasses import dataclass, field
+from typing import Final
 from uuid import uuid4
 
 import redis.asyncio as aioredis
@@ -13,7 +14,7 @@
 
 from ..async_utils import cancel_wait_task
 from ..background_task import periodic
-from ..logging_utils import log_catch
+from ..logging_utils import log_catch, log_context
 from ._constants import (
     DEFAULT_DECODE_RESPONSES,
     DEFAULT_HEALTH_CHECK_INTERVAL,
@@ -23,6 +24,9 @@
 
 _logger = logging.getLogger(__name__)
 
+# SEE https://github.com/ITISFoundation/osparc-simcore/pull/7077
+_HEALTHCHECK_TASK_TIMEOUT_S: Final[float] = 3.0
+
 
 @dataclass
 class RedisClientSDK:
@@ -77,15 +81,22 @@ async def _periodic_check_health() -> None:
         )
 
     async def shutdown(self) -> None:
-        if self._health_check_task:
-            assert self._health_check_task_started_event  # nosec
-            await self._health_check_task_started_event.wait()  # NOTE: wait for the health check task to have started once before we can cancel it
-            await cancel_wait_task(self._health_check_task)
-
-        await self._client.aclose(close_connection_pool=True)
+        with log_context(
+            _logger, level=logging.DEBUG, msg=f"Shutdown RedisClientSDK {self}"
+        ):
+            if self._health_check_task:
+                assert self._health_check_task_started_event  # nosec
+                # NOTE: wait for the health check task to have started once before we can cancel it
+                await self._health_check_task_started_event.wait()
+                await cancel_wait_task(
+                    self._health_check_task, max_delay=_HEALTHCHECK_TASK_TIMEOUT_S
+                )
+
+            await self._client.aclose(close_connection_pool=True)
 
     async def ping(self) -> bool:
         with log_catch(_logger, reraise=False):
+            # NOTE: retry_* input parameters from aioredis.from_url do not apply for the ping call
             await self._client.ping()
             return True
         return False
diff --git a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py
index daccac23c61a..7d11d2571539 100644
--- a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py
+++ b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py
@@ -349,7 +349,7 @@ def __init__(
         self.paused_container = paused_container
 
     @contextlib.asynccontextmanager
-    async def _pause_container(
+    async def _paused_container(
         self, container_name: str, client: ClientWithPingProtocol
     ) -> AsyncIterator[None]:
         async with self.paused_container(container_name):
@@ -374,7 +374,7 @@ async def _pause_container(
 
     @contextlib.asynccontextmanager
     async def pause_rabbit(self) -> AsyncIterator[None]:
-        async with self._pause_container("rabbit", self.rabbit_client):
+        async with self._paused_container("rabbit", self.rabbit_client):
             yield
 
     @contextlib.asynccontextmanager
@@ -382,7 +382,7 @@ async def pause_redis(self) -> AsyncIterator[None]:
         # save db for clean restore point
         await self.redis_client.redis.save()
 
-        async with self._pause_container("redis", self.redis_client):
+        async with self._paused_container("redis", self.redis_client):
             yield
diff --git a/scripts/common-service.Makefile b/scripts/common-service.Makefile
index 138b9ae27fc7..57fb6e3b5b46 100644
--- a/scripts/common-service.Makefile
+++ b/scripts/common-service.Makefile
@@ -44,6 +44,7 @@ install-dev install-prod install-ci: _check_venv_active ## install app in develo
 
 .PHONY: test-dev-unit test-ci-unit test-dev-integration test-ci-integration test-dev
 TEST_PATH := $(if $(test-path),/$(patsubst tests/integration/%,%, $(patsubst tests/unit/%,%, $(patsubst %/,%,$(test-path)))),)
+
 test-dev-unit test-ci-unit: _check_venv_active ## run app unit tests (specifying test-path can restrict to a folder)
	# Targets tests/unit folder
	@make --no-print-directory _run-$(subst -unit,,$@) target=$(CURDIR)/tests/unit$(TEST_PATH)
@@ -146,11 +147,11 @@ _run-test-dev: _check_venv_active
		--cov-config=.coveragerc \
		--cov-report=term-missing \
		--cov-report=xml \
-		--junitxml=junit.xml -o junit_family=legacy \
		--cov=$(APP_PACKAGE_NAME) \
		--durations=10 \
		--exitfirst \
		--failed-first \
+		--junitxml=junit.xml -o junit_family=legacy \
		--keep-docker-up \
		--pdb \
		-vv \
@@ -167,12 +168,12 @@ _run-test-ci: _check_venv_active
		--cov-config=.coveragerc \
		--cov-report=term-missing \
		--cov-report=xml \
-		--junitxml=junit.xml -o junit_family=legacy \
		--cov=$(APP_PACKAGE_NAME) \
		--durations=10 \
+		--junitxml=junit.xml -o junit_family=legacy \
		--keep-docker-up \
		--log-date-format="%Y-%m-%d %H:%M:%S" \
-		--log-format="%(asctime)s %(levelname)s %(message)s" \
+		--log-format="%(asctime)s %(levelname)s %(message)s" \
		--verbose \
		-m "not heavy_load" \
		$(PYTEST_ADDITIONAL_PARAMETERS) \
diff --git a/services/api-server/tests/unit/_with_db/conftest.py b/services/api-server/tests/unit/_with_db/conftest.py
index 98493ca0411f..dc160808f310 100644
--- a/services/api-server/tests/unit/_with_db/conftest.py
+++ b/services/api-server/tests/unit/_with_db/conftest.py
@@ -8,22 +8,22 @@
 import shutil
 import subprocess
 import sys
-from collections.abc import AsyncGenerator, AsyncIterator, Callable
+from collections.abc import AsyncGenerator, AsyncIterator, Callable, Iterable
 from pathlib import Path
+from typing import TypedDict
 
-import aiopg.sa
-import aiopg.sa.engine as aiopg_sa_engine
 import httpx
 import pytest
 import simcore_postgres_database.cli as pg_cli
 import sqlalchemy as sa
-import sqlalchemy.engine as sa_engine
+import sqlalchemy.engine
 import yaml
 from aiopg.sa.connection import SAConnection
 from fastapi import FastAPI
 from models_library.api_schemas_api_server.api_keys import ApiKeyInDB
 from pydantic import PositiveInt
 from pytest_mock import MockerFixture
+from pytest_simcore.helpers import postgres_tools
 from pytest_simcore.helpers.faker_factories import (
     random_api_key,
     random_product,
@@ -32,7 +32,6 @@
 from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
 from simcore_postgres_database.models.api_keys import api_keys
-from simcore_postgres_database.models.base import metadata
 from simcore_postgres_database.models.products import products
 from simcore_postgres_database.models.users import users
 from simcore_service_api_server.core.application import init_app
@@ -41,7 +40,9 @@
 
 ## POSTGRES -----
 
-CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+_CURRENT_DIR = (
+    Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+)
 
 
 @pytest.fixture(scope="session")
@@ -54,7 +55,7 @@ def docker_compose_file(
     environ = dict(os.environ)
     environ.update(default_app_env_vars)
 
-    src_path = CURRENT_DIR / "data" / "docker-compose.yml"
+    src_path = _CURRENT_DIR / "data" / "docker-compose.yml"
     assert src_path.exists
 
     dst_path = Path(str(tmpdir_factory.mktemp("config").join("docker-compose.yml")))
@@ -73,8 +74,19 @@ def docker_compose_file(
     return dst_path
 
 
+class PostgreServiceInfoDict(TypedDict):
+    dsn: str
+    user: str
+    password: str
+    host: str
+    port: int
+    database: str
+
+
 @pytest.fixture(scope="session")
-def postgres_service(docker_services, docker_ip, docker_compose_file: Path) -> dict:
+def postgres_service(
+    docker_services, docker_ip, docker_compose_file: Path
+) -> PostgreServiceInfoDict:
     # check docker-compose's environ is resolved properly
     config = yaml.safe_load(docker_compose_file.read_text())
     environ = config["services"]["postgres"]["environment"]
@@ -110,23 +122,20 @@ def is_postgres_responsive() -> bool:
     )
     config["dsn"] = dsn
 
-    return config
+    return PostgreServiceInfoDict(**config)
 
 
 @pytest.fixture(scope="session")
-def make_engine(postgres_service: dict) -> Callable:
-    dsn = postgres_service["dsn"]  # session scope freezes dsn
-
-    def maker(*, is_async=True) -> aiopg_sa_engine.Engine | sa_engine.Engine:
-        if is_async:
-            return aiopg.sa.create_engine(dsn)
-        return sa.create_engine(dsn)
-
-    return maker
+def sync_engine(
+    postgres_service: PostgreServiceInfoDict,
+) -> Iterable[sqlalchemy.engine.Engine]:
+    _engine: sqlalchemy.engine.Engine = sa.create_engine(url=postgres_service["dsn"])
+    yield _engine
+    _engine.dispose()
 
 
 @pytest.fixture
-def migrated_db(postgres_service: dict, make_engine: Callable):
+def migrated_db(postgres_service: dict, sync_engine: sqlalchemy.engine.Engine):
     # NOTE: this is equivalent to packages/pytest-simcore/src/pytest_simcore/postgres_service.py::postgres_db
     # but we do override postgres_dsn -> postgres_engine -> postgres_db because we want the latter
     # fixture to have local scope
@@ -140,9 +149,8 @@ def migrated_db(postgres_service: dict, make_engine: Callable):
     pg_cli.downgrade.callback("base")
     pg_cli.clean.callback()
 
-    # FIXME: deletes all because downgrade is not reliable!
-    engine = make_engine(is_async=False)
-    metadata.drop_all(engine)
+
+    postgres_tools.force_drop_all_tables(sync_engine)
 
 
 @pytest.fixture
diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py b/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py
index f42834051083..aa9f3c44e5bb 100644
--- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py
+++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py
@@ -64,8 +64,6 @@ def app_environment(
             "STUDIES_ACCESS_ANONYMOUS_ALLOWED": "1",
         },
     )
-    # NOTE: To see logs, use pytest -s --log-cli-level=DEBUG
-    # setup_logging(level=logging.DEBUG)
     print(env_vars)
     return env_vars
diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py
index 1aceea40a80c..d583e3c783e4 100644
--- a/services/web/server/tests/unit/with_dbs/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/conftest.py
@@ -28,7 +28,6 @@
 import redis
 import redis.asyncio as aioredis
 import simcore_postgres_database.cli as pg_cli
-import simcore_service_webserver.db.models as orm
 import simcore_service_webserver.email
 import simcore_service_webserver.email._core
 import simcore_service_webserver.utils
@@ -44,6 +43,7 @@
 from pydantic import ByteSize, TypeAdapter
 from pytest_docker.plugin import Services
 from pytest_mock import MockerFixture
+from pytest_simcore.helpers import postgres_tools
 from pytest_simcore.helpers.faker_factories import random_product
 from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
@@ -517,29 +517,17 @@ def postgres_db(
     assert pg_cli.upgrade.callback
     pg_cli.upgrade.callback("head")
 
     # Uses syncrounous engine for that
-    engine = sa.create_engine(url, isolation_level="AUTOCOMMIT")
+    sync_engine = sa.create_engine(url, isolation_level="AUTOCOMMIT")
 
-    yield engine
+    yield sync_engine
 
-    # NOTE: we directly drop the table, that is faster
-    # testing the upgrade/downgrade is already done in postgres-database.
-    # there is no need to it here.
-    with engine.begin() as conn:
-        conn.execute(sa.DDL("DROP TABLE IF EXISTS alembic_version"))
-
-        conn.execute(
-            # NOTE: terminates all open transactions before droping all tables
-            # This solves https://github.com/ITISFoundation/osparc-simcore/issues/7008
-            sa.DDL(
-                "SELECT pg_terminate_backend(pid) "
-                "FROM pg_stat_activity "
-                "WHERE state = 'idle in transaction';"
-            )
-        )
-        # SEE https://github.com/ITISFoundation/osparc-simcore/issues/1776
-        orm.metadata.drop_all(bind=conn)
-
-    engine.dispose()
+    try:
+        # NOTE: we directly drop the tables, which is faster;
+        # testing the upgrade/downgrade is already done in postgres-database,
+        # so there is no need to repeat it here.
+        postgres_tools.force_drop_all_tables(sync_engine)
+    finally:
+        sync_engine.dispose()
 
 
 @pytest.fixture
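
Note: taken together, the changes above converge on one pattern: a session-scoped `sync_engine` fixture plus `postgres_tools.force_drop_all_tables(...)` at teardown, replacing the old `make_engine` factory everywhere. A minimal, self-contained sketch of that pattern, using the fixture names from this diff (`postgres_service` and `db_metadata` are assumed to exist as defined in `packages/postgres-database/tests/conftest.py`; the `pg_tables` fixture is illustrative, not part of the change):

```python
from collections.abc import Iterable, Iterator

import pytest
import sqlalchemy as sa
import sqlalchemy.engine
from pytest_simcore.helpers import postgres_tools


@pytest.fixture(scope="session")
def sync_engine(postgres_service: str) -> Iterable[sqlalchemy.engine.Engine]:
    # one synchronous engine per test session, disposed once at teardown
    engine: sqlalchemy.engine.Engine = sa.create_engine(url=postgres_service)
    yield engine
    engine.dispose()


@pytest.fixture
def pg_tables(
    sync_engine: sqlalchemy.engine.Engine, db_metadata: sa.MetaData
) -> Iterator[sqlalchemy.engine.Engine]:
    # hypothetical per-test fixture: create the schema for one test ...
    db_metadata.create_all(sync_engine)
    yield sync_engine
    # ... then force-drop everything: `force_drop_all_tables` first terminates
    # 'idle in transaction' sessions so that DROP TABLE cannot block (issue 7008)
    postgres_tools.force_drop_all_tables(sync_engine)
```

Centralizing the teardown in `force_drop_all_tables` means the `pg_terminate_backend` workaround lives in exactly one place instead of being copy-pasted across the conftest files, which is what the removed blocks in this diff were.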