diff --git a/packages/models-library/Makefile b/packages/models-library/Makefile index 6795fcd2610b..b41bafd2f2c7 100644 --- a/packages/models-library/Makefile +++ b/packages/models-library/Makefile @@ -110,14 +110,15 @@ erd-ServiceInput.svg: _erdantic DOWNLOADED_TEST_DATA_DIR = "$(CURDIR)/tests/data/.downloaded-ignore" -.PHONY: _httpx +.PHONY: _ensure_httpx -_httpx: _check_venv_active +_ensure_httpx: _check_venv_active # ensures requirements installed @python3 -c "import httpx" 2>/dev/null || uv pip install httpx -PHONY: pull_test_data -pull_test_data: $(DOT_ENV_FILE) _httpx ## downloads tests data from registry (this can take some time!) - # downloading all metadata files +.PHONY: tests-data +tests-data: $(DOT_ENV_FILE) _ensure_httpx ## downloads tests data from registry defined in .env (time-intensive!) + # Downloading all metadata files ... @set -o allexport; \ source $<; \ set +o allexport; \ python3 "$(PACKAGES_DIR)/pytest-simcore/src/pytest_simcore/helpers/docker_registry.py" $(DOWNLOADED_TEST_DATA_DIR) + @echo "Run now 'pytest -vv -m diagnostics tests'" diff --git a/packages/notifications-library/src/notifications_library/templates/on_account_form.email.content.html b/packages/notifications-library/src/notifications_library/templates/on_account_form.email.content.html index 96230e800008..edf5f1a640cc 100644 --- a/packages/notifications-library/src/notifications_library/templates/on_account_form.email.content.html +++ b/packages/notifications-library/src/notifications_library/templates/on_account_form.email.content.html @@ -4,7 +4,7 @@
Dear Support team
- We have received the following request form for an account in {{ product.display_name }} from {{ host }} + We have received the following request form for an account in {{ product.display_name }} from {{ host }}
diff --git a/packages/notifications-library/src/notifications_library/templates/on_account_form.email.content.txt b/packages/notifications-library/src/notifications_library/templates/on_account_form.email.content.txt
index 596ac7d01e5f..0eb9d7d4a641 100644
--- a/packages/notifications-library/src/notifications_library/templates/on_account_form.email.content.txt
+++ b/packages/notifications-library/src/notifications_library/templates/on_account_form.email.content.txt
@@ -1,6 +1,6 @@
Dear Support team,
-We have received the following request form for an account in {{ product.display_name }} from {{ host }}:
+We have received the following request form for an account in {{ product.display_name }} from **{{ host }}**:
{{ dumps(request_form) }}
diff --git a/packages/postgres-database/src/simcore_postgres_database/base_repo.py b/packages/postgres-database/src/simcore_postgres_database/base_repo.py
new file mode 100644
index 000000000000..fdee0721e07f
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/base_repo.py
@@ -0,0 +1,43 @@
+import logging
+from collections.abc import AsyncIterator
+from contextlib import asynccontextmanager
+
+from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine
+
+_logger = logging.getLogger(__name__)
+
+
+@asynccontextmanager
+async def get_or_create_connection(
+ engine: AsyncEngine, connection: AsyncConnection | None = None
+) -> AsyncIterator[AsyncConnection]:
+ # NOTE: When connection is passed, the engine is actually not needed
+    # NOTE: the creator is responsible for closing the connection
+ is_connection_created = connection is None
+ if is_connection_created:
+ connection = await engine.connect()
+ try:
+ assert connection # nosec
+ yield connection
+ finally:
+ assert connection # nosec
+ assert not connection.closed # nosec
+ if is_connection_created and connection:
+ await connection.close()
+
+
+@asynccontextmanager
+async def transaction_context(
+ engine: AsyncEngine, connection: AsyncConnection | None = None
+):
+ async with get_or_create_connection(engine, connection) as conn:
+ if conn.in_transaction():
+ async with conn.begin_nested(): # inner transaction (savepoint)
+ yield conn
+ else:
+ try:
+                async with conn.begin():  # outer transaction
+ yield conn
+ finally:
+ assert not conn.closed # nosec
+ assert not conn.in_transaction() # nosec
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/products.py b/packages/postgres-database/src/simcore_postgres_database/models/products.py
index 913c929e04c5..03e137528ecd 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/products.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/products.py
@@ -41,8 +41,6 @@ class Vendor(TypedDict, total=False):
invitation_url: str # How to request a trial invitation? (if applies)
invitation_form: bool # If True, it takes precendence over invitation_url and asks the FE to show the form (if defined)
- has_landing_page: bool # Is Landing page enabled
-
release_notes_url_template: str # a template url where `{vtag}` will be replaced, eg: "http://example.com/{vtag}.md"
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_tags.py b/packages/postgres-database/src/simcore_postgres_database/utils_tags.py
index 0a8b3e4ac28a..1057ffeb2884 100644
--- a/packages/postgres-database/src/simcore_postgres_database/utils_tags.py
+++ b/packages/postgres-database/src/simcore_postgres_database/utils_tags.py
@@ -1,12 +1,11 @@
""" Repository pattern, errors and data structures for models.tags
"""
-import itertools
-from dataclasses import dataclass
from typing import TypedDict
-from aiopg.sa.connection import SAConnection
+from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine
+from .base_repo import get_or_create_connection, transaction_context
from .utils_tags_sql import (
count_users_with_access_rights_stmt,
create_tag_stmt,
@@ -49,15 +48,16 @@ class TagDict(TypedDict, total=True):
delete: bool
-@dataclass(frozen=True)
class TagsRepo:
- user_id: int # Determines access-rights
+ def __init__(self, engine: AsyncEngine):
+ self.engine = engine
async def access_count(
self,
- conn: SAConnection,
- tag_id: int,
+ connection: AsyncConnection | None = None,
*,
+ user_id: int,
+ tag_id: int,
read: bool | None = None,
write: bool | None = None,
delete: bool | None = None,
@@ -66,11 +66,12 @@ async def access_count(
Returns 0 if tag does not match access
Returns >0 if it does and represents the number of groups granting this access to the user
"""
- count_stmt = count_users_with_access_rights_stmt(
- user_id=self.user_id, tag_id=tag_id, read=read, write=write, delete=delete
- )
- permissions_count: int | None = await conn.scalar(count_stmt)
- return permissions_count if permissions_count else 0
+ async with get_or_create_connection(self.engine, connection) as conn:
+ count_stmt = count_users_with_access_rights_stmt(
+ user_id=user_id, tag_id=tag_id, read=read, write=write, delete=delete
+ )
+ permissions_count: int | None = await conn.scalar(count_stmt)
+ return permissions_count if permissions_count else 0
#
# CRUD operations
@@ -78,8 +79,9 @@ async def access_count(
async def create(
self,
- conn: SAConnection,
+ connection: AsyncConnection | None = None,
*,
+ user_id: int,
name: str,
color: str,
description: str | None = None, # =nullable
@@ -94,69 +96,127 @@ async def create(
if description:
values["description"] = description
- async with conn.begin():
+ async with transaction_context(self.engine, connection) as conn:
# insert new tag
insert_stmt = create_tag_stmt(**values)
result = await conn.execute(insert_stmt)
- tag = await result.first()
+ tag = result.first()
assert tag # nosec
# take tag ownership
access_stmt = set_tag_access_rights_stmt(
tag_id=tag.id,
- user_id=self.user_id,
+ user_id=user_id,
read=read,
write=write,
delete=delete,
)
result = await conn.execute(access_stmt)
- access = await result.first()
- assert access
-
- return TagDict(itertools.chain(tag.items(), access.items())) # type: ignore
-
- async def list_all(self, conn: SAConnection) -> list[TagDict]:
- stmt_list = list_tags_stmt(user_id=self.user_id)
- return [TagDict(row.items()) async for row in conn.execute(stmt_list)] # type: ignore
+ access = result.first()
+ assert access # nosec
+
+ return TagDict(
+ id=tag.id,
+ name=tag.name,
+ description=tag.description,
+ color=tag.color,
+ read=access.read,
+ write=access.write,
+ delete=access.delete,
+ )
- async def get(self, conn: SAConnection, tag_id: int) -> TagDict:
- stmt_get = get_tag_stmt(user_id=self.user_id, tag_id=tag_id)
- result = await conn.execute(stmt_get)
- row = await result.first()
- if not row:
- msg = f"{tag_id=} not found: either no access or does not exists"
- raise TagNotFoundError(msg)
- return TagDict(row.items()) # type: ignore
+ async def list_all(
+ self,
+ connection: AsyncConnection | None = None,
+ *,
+ user_id: int,
+ ) -> list[TagDict]:
+ async with get_or_create_connection(self.engine, connection) as conn:
+ stmt_list = list_tags_stmt(user_id=user_id)
+ result = await conn.stream(stmt_list)
+ return [
+ TagDict(
+ id=row.id,
+ name=row.name,
+ description=row.description,
+ color=row.color,
+ read=row.read,
+ write=row.write,
+ delete=row.delete,
+ )
+ async for row in result
+ ]
+
+ async def get(
+ self,
+ connection: AsyncConnection | None = None,
+ *,
+ user_id: int,
+ tag_id: int,
+ ) -> TagDict:
+ stmt_get = get_tag_stmt(user_id=user_id, tag_id=tag_id)
+ async with get_or_create_connection(self.engine, connection) as conn:
+ result = await conn.execute(stmt_get)
+ row = result.first()
+ if not row:
+                msg = f"{tag_id=} not found: either no access or does not exist"
+ raise TagNotFoundError(msg)
+ return TagDict(
+ id=row.id,
+ name=row.name,
+ description=row.description,
+ color=row.color,
+ read=row.read,
+ write=row.write,
+ delete=row.delete,
+ )
async def update(
self,
- conn: SAConnection,
+ connection: AsyncConnection | None = None,
+ *,
+ user_id: int,
tag_id: int,
**fields,
) -> TagDict:
- updates = {
- name: value
- for name, value in fields.items()
- if name in {"name", "color", "description"}
- }
-
- if not updates:
- # no updates == get
- return await self.get(conn, tag_id=tag_id)
-
- update_stmt = update_tag_stmt(user_id=self.user_id, tag_id=tag_id, **updates)
- result = await conn.execute(update_stmt)
- row = await result.first()
- if not row:
- msg = f"{tag_id=} not updated: either no access or not found"
- raise TagOperationNotAllowedError(msg)
-
- return TagDict(row.items()) # type: ignore
-
- async def delete(self, conn: SAConnection, tag_id: int) -> None:
- stmt_delete = delete_tag_stmt(user_id=self.user_id, tag_id=tag_id)
+ async with transaction_context(self.engine, connection) as conn:
+ updates = {
+ name: value
+ for name, value in fields.items()
+ if name in {"name", "color", "description"}
+ }
+
+ if not updates:
+ # no updates == get
+ return await self.get(conn, user_id=user_id, tag_id=tag_id)
+
+ update_stmt = update_tag_stmt(user_id=user_id, tag_id=tag_id, **updates)
+ result = await conn.execute(update_stmt)
+ row = result.first()
+ if not row:
+ msg = f"{tag_id=} not updated: either no access or not found"
+ raise TagOperationNotAllowedError(msg)
+
+ return TagDict(
+ id=row.id,
+ name=row.name,
+ description=row.description,
+ color=row.color,
+ read=row.read,
+ write=row.write,
+ delete=row.delete,
+ )
- deleted = await conn.scalar(stmt_delete)
- if not deleted:
- msg = f"Could not delete {tag_id=}. Not found or insuficient access."
- raise TagOperationNotAllowedError(msg)
+ async def delete(
+ self,
+ connection: AsyncConnection | None = None,
+ *,
+ user_id: int,
+ tag_id: int,
+ ) -> None:
+ stmt_delete = delete_tag_stmt(user_id=user_id, tag_id=tag_id)
+ async with transaction_context(self.engine, connection) as conn:
+ deleted = await conn.scalar(stmt_delete)
+ if not deleted:
+                msg = f"Could not delete {tag_id=}. Not found or insufficient access."
+ raise TagOperationNotAllowedError(msg)
diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py
index 0d2224e286ee..5526b668e398 100644
--- a/packages/postgres-database/tests/conftest.py
+++ b/packages/postgres-database/tests/conftest.py
@@ -4,6 +4,7 @@
# pylint: disable=unused-variable
import uuid
+import warnings
from collections.abc import AsyncIterator, Awaitable, Callable, Iterator
from pathlib import Path
@@ -37,6 +38,7 @@
user_to_groups,
users,
)
+from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
pytest_plugins = [
"pytest_simcore.pytest_global_environs",
@@ -81,6 +83,30 @@ def _make(is_async=True) -> Awaitable[Engine] | sa.engine.base.Engine:
return _make
+@pytest.fixture
+def make_asyncpg_engine(postgres_service: str) -> Callable[[bool], AsyncEngine]:
+    # NOTE: the user is responsible for calling `await engine.dispose()`
+ dsn = postgres_service.replace("postgresql://", "postgresql+asyncpg://")
+ minsize = 1
+ maxsize = 50
+
+ def _(echo: bool):
+ engine: AsyncEngine = create_async_engine(
+ dsn,
+ pool_size=minsize,
+ max_overflow=maxsize - minsize,
+ connect_args={
+ "server_settings": {"application_name": "postgres_database_tests"}
+ },
+ pool_pre_ping=True, # https://docs.sqlalchemy.org/en/14/core/pooling.html#dealing-with-disconnects
+ future=True, # this uses sqlalchemy 2.0 API, shall be removed when sqlalchemy 2.0 is released
+ echo=echo,
+ )
+ return engine
+
+ return _
+
+
def is_postgres_responsive(dsn) -> bool:
"""Check if something responds to ``url``"""
try:
@@ -107,6 +133,11 @@ def pg_sa_engine(
) -> Iterator[sa.engine.Engine]:
"""
Runs migration to create tables and return a sqlalchemy engine
+
+ NOTE: use this fixture to ensure pg db:
+ - up,
+ - responsive,
+ - init (w/ tables) and/or migrated
"""
# NOTE: Using migration to upgrade/downgrade is not
# such a great idea since these tests are used while developing
@@ -142,29 +173,56 @@ def pg_sa_engine(
@pytest.fixture
-async def pg_engine(
+async def aiopg_engine(
pg_sa_engine: sa.engine.Engine, make_engine: Callable
) -> AsyncIterator[Engine]:
"""
Return an aiopg.sa engine connected to a responsive and migrated pg database
"""
- async_engine = await make_engine(is_async=True)
- yield async_engine
+ aiopg_sa_engine = await make_engine(is_async=True)
+
+ warnings.warn(
+ "The 'aiopg_engine' is deprecated since we are replacing `aiopg` library by `sqlalchemy.ext.asyncio`."
+ "SEE https://github.com/ITISFoundation/osparc-simcore/issues/4529. "
+ "Please use 'asyncpg_engine' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ yield aiopg_sa_engine
# closes async-engine connections and terminates
- async_engine.close()
- await async_engine.wait_closed()
- async_engine.terminate()
+ aiopg_sa_engine.close()
+ await aiopg_sa_engine.wait_closed()
+ aiopg_sa_engine.terminate()
@pytest.fixture
-async def connection(pg_engine: Engine) -> AsyncIterator[SAConnection]:
+async def connection(aiopg_engine: Engine) -> AsyncIterator[SAConnection]:
"""Returns an aiopg.sa connection from an engine to a fully furnished and ready pg database"""
- async with pg_engine.acquire() as _conn:
+ async with aiopg_engine.acquire() as _conn:
yield _conn
+@pytest.fixture
+async def asyncpg_engine(
+ is_pdb_enabled: bool,
+ pg_sa_engine: sa.engine.Engine,
+ make_asyncpg_engine: Callable[[bool], AsyncEngine],
+) -> AsyncIterator[AsyncEngine]:
+
+ assert (
+ pg_sa_engine
+ ), "Ensures pg db up, responsive, init (w/ tables) and/or migrated"
+
+ _apg_engine = make_asyncpg_engine(is_pdb_enabled)
+
+ yield _apg_engine
+
+ await _apg_engine.dispose()
+
+
#
# FACTORY FIXTURES
#
@@ -240,7 +298,7 @@ async def _creator(conn, group: RowProxy | None = None, **overrides) -> RowProxy
@pytest.fixture
async def create_fake_cluster(
- pg_engine: Engine, faker: Faker
+ aiopg_engine: Engine, faker: Faker
) -> AsyncIterator[Callable[..., Awaitable[int]]]:
cluster_ids = []
assert cluster_to_groups is not None
@@ -254,7 +312,7 @@ async def _creator(**overrides) -> int:
"authentication": faker.pydict(value_types=[str]),
}
insert_values.update(overrides)
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
cluster_id = await conn.scalar(
clusters.insert().values(**insert_values).returning(clusters.c.id)
)
@@ -265,13 +323,13 @@ async def _creator(**overrides) -> int:
yield _creator
# cleanup
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
await conn.execute(clusters.delete().where(clusters.c.id.in_(cluster_ids)))
@pytest.fixture
async def create_fake_project(
- pg_engine: Engine,
+ aiopg_engine: Engine,
) -> AsyncIterator[Callable[..., Awaitable[RowProxy]]]:
created_project_uuids = []
@@ -288,7 +346,7 @@ async def _creator(conn, user: RowProxy, **overrides) -> RowProxy:
yield _creator
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
await conn.execute(
projects.delete().where(projects.c.uuid.in_(created_project_uuids))
)
diff --git a/packages/postgres-database/tests/products/test_models_products.py b/packages/postgres-database/tests/products/test_models_products.py
index 02d9a6076190..c385cd7e7340 100644
--- a/packages/postgres-database/tests/products/test_models_products.py
+++ b/packages/postgres-database/tests/products/test_models_products.py
@@ -26,14 +26,14 @@
async def test_load_products(
- pg_engine: Engine, make_products_table: Callable, products_regex: dict
+ aiopg_engine: Engine, make_products_table: Callable, products_regex: dict
):
exclude = {
products.c.created,
products.c.modified,
}
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
await make_products_table(conn)
stmt = sa.select(*[c for c in products.columns if c not in exclude])
@@ -49,14 +49,14 @@ async def test_load_products(
async def test_jinja2_templates_table(
- pg_engine: Engine, osparc_simcore_services_dir: Path
+ aiopg_engine: Engine, osparc_simcore_services_dir: Path
):
templates_common_dir = (
osparc_simcore_services_dir
/ "web/server/src/simcore_service_webserver/templates/common"
)
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
templates = []
# templates table
for p in templates_common_dir.glob("*.jinja2"):
@@ -135,7 +135,7 @@ async def test_jinja2_templates_table(
async def test_insert_select_product(
- pg_engine: Engine,
+ aiopg_engine: Engine,
):
osparc_product = {
"name": "osparc",
@@ -174,7 +174,7 @@ async def test_insert_select_product(
print(json.dumps(osparc_product))
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
# writes
stmt = (
pg_insert(products)
diff --git a/packages/postgres-database/tests/products/test_utils_products.py b/packages/postgres-database/tests/products/test_utils_products.py
index 3956d74e2262..a1b84fe96dd8 100644
--- a/packages/postgres-database/tests/products/test_utils_products.py
+++ b/packages/postgres-database/tests/products/test_utils_products.py
@@ -19,24 +19,24 @@
)
-async def test_default_product(pg_engine: Engine, make_products_table: Callable):
- async with pg_engine.acquire() as conn:
+async def test_default_product(aiopg_engine: Engine, make_products_table: Callable):
+ async with aiopg_engine.acquire() as conn:
await make_products_table(conn)
default_product = await get_default_product_name(conn)
assert default_product == "s4l"
@pytest.mark.parametrize("pg_sa_engine", ["sqlModels"], indirect=True)
-async def test_default_product_undefined(pg_engine: Engine):
- async with pg_engine.acquire() as conn:
+async def test_default_product_undefined(aiopg_engine: Engine):
+ async with aiopg_engine.acquire() as conn:
with pytest.raises(ValueError):
await get_default_product_name(conn)
async def test_get_or_create_group_product(
- pg_engine: Engine, make_products_table: Callable
+ aiopg_engine: Engine, make_products_table: Callable
):
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
await make_products_table(conn)
async for product_row in await conn.execute(
@@ -105,13 +105,13 @@ async def test_get_or_create_group_product(
reason="Not relevant. Will review in https://github.com/ITISFoundation/osparc-simcore/issues/3754"
)
async def test_get_or_create_group_product_concurrent(
- pg_engine: Engine, make_products_table: Callable
+ aiopg_engine: Engine, make_products_table: Callable
):
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
await make_products_table(conn)
async def _auto_create_products_groups():
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
async for product_row in await conn.execute(
sa.select(products.c.name, products.c.group_id).order_by(
products.c.priority
diff --git a/packages/postgres-database/tests/projects/conftest.py b/packages/postgres-database/tests/projects/conftest.py
index fb507557fbf2..2a1b9c99f245 100644
--- a/packages/postgres-database/tests/projects/conftest.py
+++ b/packages/postgres-database/tests/projects/conftest.py
@@ -16,10 +16,10 @@
@pytest.fixture
-async def user(pg_engine: Engine) -> RowProxy:
+async def user(aiopg_engine: Engine) -> RowProxy:
_USERNAME = f"{__name__}.me"
# some user
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
result: ResultProxy | None = await conn.execute(
users.insert().values(**random_user(name=_USERNAME)).returning(users)
)
@@ -32,10 +32,10 @@ async def user(pg_engine: Engine) -> RowProxy:
@pytest.fixture
-async def project(pg_engine: Engine, user: RowProxy) -> RowProxy:
+async def project(aiopg_engine: Engine, user: RowProxy) -> RowProxy:
_PARENT_PROJECT_NAME = f"{__name__}.parent"
# a user's project
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
result: ResultProxy | None = await conn.execute(
projects.insert()
.values(**random_project(prj_owner=user.id, name=_PARENT_PROJECT_NAME))
@@ -50,6 +50,6 @@ async def project(pg_engine: Engine, user: RowProxy) -> RowProxy:
@pytest.fixture
-async def conn(pg_engine: Engine) -> AsyncIterable[SAConnection]:
- async with pg_engine.acquire() as conn:
+async def conn(aiopg_engine: Engine) -> AsyncIterable[SAConnection]:
+ async with aiopg_engine.acquire() as conn:
yield conn
diff --git a/packages/postgres-database/tests/test_base_repo.py b/packages/postgres-database/tests/test_base_repo.py
new file mode 100644
index 000000000000..080b7ad31214
--- /dev/null
+++ b/packages/postgres-database/tests/test_base_repo.py
@@ -0,0 +1,213 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
+
+
+from typing import Any, NamedTuple
+
+import pytest
+import sqlalchemy as sa
+from simcore_postgres_database.base_repo import (
+ get_or_create_connection,
+ transaction_context,
+)
+from simcore_postgres_database.models.tags import tags
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine
+
+
+async def test_sa_transactions(asyncpg_engine: AsyncEngine):
+ #
+ # SEE https://docs.sqlalchemy.org/en/20/orm/extensions/asyncio.html#synopsis-core
+ #
+
+ # READ query
+ total_count_query = sa.select(sa.func.count()).select_from(tags)
+
+ # WRITE queries
+ query1 = (
+ tags.insert().values(id=2, name="query1", color="blue").returning(tags.c.id)
+ )
+ query11 = (
+ tags.insert().values(id=3, name="query11", color="blue").returning(tags.c.id)
+ )
+ query12 = (
+ tags.insert().values(id=5, name="query12", color="blue").returning(tags.c.id)
+ )
+ query2 = (
+ tags.insert().values(id=6, name="query2", color="blue").returning(tags.c.id)
+ )
+ query2 = (
+ tags.insert().values(id=7, name="query2", color="blue").returning(tags.c.id)
+ )
+
+ async with asyncpg_engine.connect() as conn, conn.begin(): # starts transaction (savepoint)
+
+ result = await conn.execute(query1)
+ assert result.scalar() == 2
+
+ total_count = (await conn.execute(total_count_query)).scalar()
+ assert total_count == 1
+
+ rows = (await conn.execute(tags.select().where(tags.c.id == 2))).fetchall()
+ assert rows
+ assert rows[0].id == 2
+
+ async with conn.begin_nested(): # savepoint
+ await conn.execute(query11)
+
+ with pytest.raises(IntegrityError):
+ async with conn.begin_nested(): # savepoint
+ await conn.execute(query11)
+
+ await conn.execute(query12)
+
+ total_count = (await conn.execute(total_count_query)).scalar()
+ assert total_count == 3 # since query11 (second time) reverted!
+
+ await conn.execute(query2)
+
+ total_count = (await conn.execute(total_count_query)).scalar()
+ assert total_count == 4
+
+
+class _PageTuple(NamedTuple):
+ total_count: int
+ rows: list[dict[str, Any]]
+
+
+class OneResourceRepoDemo:
+ # This is a PROTOTYPE of how one could implement a generic
+ # repo that provides CRUD operations providing a given table
+ def __init__(self, engine: AsyncEngine, table: sa.Table):
+ if "id" not in table.columns:
+ msg = "id column expected"
+ raise ValueError(msg)
+ self.table = table
+
+ self.engine = engine
+
+ async def create(self, connection: AsyncConnection | None = None, **kwargs) -> int:
+ async with transaction_context(self.engine, connection) as conn:
+ result = await conn.execute(self.table.insert().values(**kwargs))
+ assert result # nosec
+ return result.inserted_primary_key[0]
+
+ async def get_by_id(
+ self,
+ connection: AsyncConnection | None = None,
+ *,
+ row_id: int,
+ ) -> dict[str, Any] | None:
+ async with get_or_create_connection(self.engine, connection) as conn:
+ result = await conn.execute(
+ sa.select(self.table).where(self.table.c.id == row_id)
+ )
+ row = result.mappings().fetchone()
+ return dict(row) if row else None
+
+ async def get_page(
+ self,
+ connection: AsyncConnection | None = None,
+ *,
+ limit: int,
+ offset: int = 0,
+ ) -> _PageTuple:
+ async with get_or_create_connection(self.engine, connection) as conn:
+ # Compute total count
+ total_count_query = sa.select(sa.func.count()).select_from(self.table)
+ total_count_result = await conn.execute(total_count_query)
+ total_count = total_count_result.scalar()
+
+ # Fetch paginated results
+ query = sa.select(self.table).limit(limit).offset(offset)
+ result = await conn.execute(query)
+ rows = [dict(row) for row in result.mappings().fetchall()]
+
+ return _PageTuple(total_count=total_count or 0, rows=rows)
+
+ async def update(
+ self,
+ connection: AsyncConnection | None = None,
+ *,
+ row_id: int,
+ **values,
+ ) -> bool:
+ async with transaction_context(self.engine, connection) as conn:
+ result = await conn.execute(
+ self.table.update().where(self.table.c.id == row_id).values(**values)
+ )
+ return result.rowcount > 0
+
+ async def delete(
+ self,
+ connection: AsyncConnection | None = None,
+ *,
+ row_id: int,
+ ) -> bool:
+ async with transaction_context(self.engine, connection) as conn:
+ result = await conn.execute(
+ self.table.delete().where(self.table.c.id == row_id)
+ )
+ return result.rowcount > 0
+
+
+async def test_oneresourcerepodemo_prototype(asyncpg_engine: AsyncEngine):
+
+ tags_repo = OneResourceRepoDemo(engine=asyncpg_engine, table=tags)
+
+ # create
+ tag_id = await tags_repo.create(name="cyan tag", color="cyan")
+ assert tag_id > 0
+
+ # get, list
+ tag = await tags_repo.get_by_id(row_id=tag_id)
+ assert tag
+
+ page = await tags_repo.get_page(limit=10)
+ assert page.total_count == 1
+ assert page.rows == [tag]
+
+ # update
+ ok = await tags_repo.update(row_id=tag_id, name="changed name")
+ assert ok
+
+ updated_tag = await tags_repo.get_by_id(row_id=tag_id)
+ assert updated_tag
+ assert updated_tag["name"] != tag["name"]
+
+ # delete
+ ok = await tags_repo.delete(row_id=tag_id)
+ assert ok
+
+ assert not await tags_repo.get_by_id(row_id=tag_id)
+
+
+async def test_transaction_context(asyncpg_engine: AsyncEngine):
+ # (1) Using transaction_context and fails
+ fake_error_msg = "some error"
+
+ def _something_raises_here():
+ raise RuntimeError(fake_error_msg)
+
+ tags_repo = OneResourceRepoDemo(engine=asyncpg_engine, table=tags)
+
+ # using external transaction_context: commits upon __aexit__
+ async with transaction_context(asyncpg_engine) as conn:
+ await tags_repo.create(conn, name="cyan tag", color="cyan")
+ await tags_repo.create(conn, name="red tag", color="red")
+ assert (await tags_repo.get_page(conn, limit=10, offset=0)).total_count == 2
+
+ # using internal: auto-commit
+ await tags_repo.create(name="red tag", color="red")
+ assert (await tags_repo.get_page(limit=10, offset=0)).total_count == 3
+
+ # auto-rollback
+ with pytest.raises(RuntimeError, match=fake_error_msg): # noqa: PT012
+ async with transaction_context(asyncpg_engine) as conn:
+ await tags_repo.create(conn, name="violet tag", color="violet")
+ assert (await tags_repo.get_page(conn, limit=10, offset=0)).total_count == 4
+ _something_raises_here()
+
+ assert (await tags_repo.get_page(limit=10, offset=0)).total_count == 3
diff --git a/packages/postgres-database/tests/test_classifiers.py b/packages/postgres-database/tests/test_classifiers.py
index f53740a124df..8e8e0eba24cb 100644
--- a/packages/postgres-database/tests/test_classifiers.py
+++ b/packages/postgres-database/tests/test_classifiers.py
@@ -38,10 +38,10 @@ def classifiers_bundle(web_client_resource_folder: Path) -> dict:
async def test_operations_on_group_classifiers(
- pg_engine: Engine, classifiers_bundle: dict
+ aiopg_engine: Engine, classifiers_bundle: dict
):
# NOTE: mostly for TDD
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
# creates a group
stmt = (
groups.insert()
diff --git a/packages/postgres-database/tests/test_clusters.py b/packages/postgres-database/tests/test_clusters.py
index 6dccc8ef153c..95cd8492965f 100644
--- a/packages/postgres-database/tests/test_clusters.py
+++ b/packages/postgres-database/tests/test_clusters.py
@@ -16,8 +16,8 @@
@pytest.fixture
-async def user_id(pg_engine: Engine) -> AsyncIterable[int]:
- async with pg_engine.acquire() as conn:
+async def user_id(aiopg_engine: Engine) -> AsyncIterable[int]:
+ async with aiopg_engine.acquire() as conn:
# a 'me' user
uid = await conn.scalar(
users.insert().values(**(random_user())).returning(users.c.id)
@@ -25,14 +25,14 @@ async def user_id(pg_engine: Engine) -> AsyncIterable[int]:
assert uid is not None
yield uid
# cleanup
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
# a 'me' user
uid = await conn.execute(users.delete().where(users.c.id == uid))
@pytest.fixture
-async def user_group_id(pg_engine: Engine, user_id: int) -> int:
- async with pg_engine.acquire() as conn:
+async def user_group_id(aiopg_engine: Engine, user_id: int) -> int:
+ async with aiopg_engine.acquire() as conn:
primary_gid = await conn.scalar(
sa.select(users.c.primary_gid).where(users.c.id == user_id)
)
@@ -64,34 +64,34 @@ async def test_can_create_cluster_with_owner(
async def test_cannot_remove_owner_that_owns_cluster(
- pg_engine: Engine,
+ aiopg_engine: Engine,
user_id: int,
user_group_id: int,
create_fake_cluster: Callable[..., Awaitable[int]],
):
cluster_id = await create_fake_cluster(owner=user_group_id)
# now try removing the user
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
with pytest.raises(ForeignKeyViolation):
await conn.execute(users.delete().where(users.c.id == user_id))
# now remove the cluster
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
await conn.execute(clusters.delete().where(clusters.c.id == cluster_id))
# removing the user should work now
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
await conn.execute(users.delete().where(users.c.id == user_id))
async def test_cluster_owner_has_all_rights(
- pg_engine: Engine,
+ aiopg_engine: Engine,
user_group_id: int,
create_fake_cluster: Callable[..., Awaitable[int]],
):
cluster_id = await create_fake_cluster(owner=user_group_id)
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
result: ResultProxy = await conn.execute(
cluster_to_groups.select().where(
cluster_to_groups.c.cluster_id == cluster_id
diff --git a/packages/postgres-database/tests/test_comp_tasks.py b/packages/postgres-database/tests/test_comp_tasks.py
index 46152e30a69f..4c36260f4318 100644
--- a/packages/postgres-database/tests/test_comp_tasks.py
+++ b/packages/postgres-database/tests/test_comp_tasks.py
@@ -19,8 +19,8 @@
@pytest.fixture()
-async def db_connection(pg_engine: Engine) -> SAConnection:
- async with pg_engine.acquire() as conn:
+async def db_connection(aiopg_engine: Engine) -> SAConnection:
+ async with aiopg_engine.acquire() as conn:
yield conn
diff --git a/packages/postgres-database/tests/test_delete_projects_and_users.py b/packages/postgres-database/tests/test_delete_projects_and_users.py
index b0e0edacef84..4c3801560664 100644
--- a/packages/postgres-database/tests/test_delete_projects_and_users.py
+++ b/packages/postgres-database/tests/test_delete_projects_and_users.py
@@ -15,8 +15,8 @@
@pytest.fixture
-async def engine(pg_engine: Engine):
- async with pg_engine.acquire() as conn:
+async def engine(aiopg_engine: Engine):
+ async with aiopg_engine.acquire() as conn:
await conn.execute(users.insert().values(**random_user(name="A")))
await conn.execute(users.insert().values(**random_user()))
await conn.execute(users.insert().values(**random_user()))
@@ -27,7 +27,7 @@ async def engine(pg_engine: Engine):
with pytest.raises(ForeignKeyViolation):
await conn.execute(projects.insert().values(**random_project(prj_owner=4)))
- return pg_engine
+ return aiopg_engine
@pytest.mark.skip(reason="sandbox for dev purposes")
diff --git a/packages/postgres-database/tests/test_services_consume_filetypes.py b/packages/postgres-database/tests/test_services_consume_filetypes.py
index 88c68dadc7c2..f72799299073 100644
--- a/packages/postgres-database/tests/test_services_consume_filetypes.py
+++ b/packages/postgres-database/tests/test_services_consume_filetypes.py
@@ -59,9 +59,9 @@ async def _make(connection: SAConnection):
@pytest.fixture
async def connection(
- pg_engine: sa.engine.Engine, connection: SAConnection, make_table: Callable
+ aiopg_engine: sa.engine.Engine, connection: SAConnection, make_table: Callable
):
- assert pg_engine
+ assert aiopg_engine
# NOTE: do not remove th pg_engine, or the test will fail as pytest
# cannot set the parameters in the fixture
diff --git a/packages/postgres-database/tests/test_utils_aiopg_orm.py b/packages/postgres-database/tests/test_utils_aiopg_orm.py
index d34ef15f95ac..2905a3f3a87b 100644
--- a/packages/postgres-database/tests/test_utils_aiopg_orm.py
+++ b/packages/postgres-database/tests/test_utils_aiopg_orm.py
@@ -16,12 +16,12 @@
@pytest.fixture
-async def fake_scicrunch_ids(pg_engine: Engine) -> list[str]:
+async def fake_scicrunch_ids(aiopg_engine: Engine) -> list[str]:
row1 = {"rrid": "RRID:foo", "name": "foo", "description": "fooing"}
row2 = {"rrid": "RRID:bar", "name": "bar", "description": "barring"}
row_ids = []
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
for row in (row1, row2):
row_id = await conn.scalar(
scicrunch_resources.insert()
@@ -35,7 +35,7 @@ async def fake_scicrunch_ids(pg_engine: Engine) -> list[str]:
@pytest.fixture()
-async def scicrunch_orm(pg_engine: Engine) -> Iterator[BaseOrm[str]]:
+async def scicrunch_orm(aiopg_engine: Engine) -> Iterator[BaseOrm[str]]:
# This is a table without dependencies and therefore easy to use as fixture
class ScicrunchOrm(BaseOrm[str]):
def __init__(self, connection: SAConnection):
@@ -46,7 +46,7 @@ def __init__(self, connection: SAConnection):
writeonce={"rrid"},
)
- async with pg_engine.acquire() as conn:
+ async with aiopg_engine.acquire() as conn:
orm_obj = ScicrunchOrm(conn)
yield orm_obj
diff --git a/packages/postgres-database/tests/test_utils_projects_nodes.py b/packages/postgres-database/tests/test_utils_projects_nodes.py
index a20083608dd7..33e5b86b7cb2 100644
--- a/packages/postgres-database/tests/test_utils_projects_nodes.py
+++ b/packages/postgres-database/tests/test_utils_projects_nodes.py
@@ -309,7 +309,7 @@ async def test_delete_project_delete_all_nodes(
@pytest.mark.parametrize("num_concurrent_workflows", [1, 250])
async def test_multiple_creation_deletion_of_nodes(
- pg_engine: Engine,
+ aiopg_engine: Engine,
registered_user: RowProxy,
create_fake_project: Callable[..., Awaitable[RowProxy]],
create_fake_projects_node: Callable[..., ProjectNodeCreate],
@@ -318,7 +318,7 @@ async def test_multiple_creation_deletion_of_nodes(
NUM_NODES = 11
async def _workflow() -> None:
- async with pg_engine.acquire() as connection:
+ async with aiopg_engine.acquire() as connection:
project = await create_fake_project(connection, registered_user)
projects_nodes_repo = ProjectNodesRepo(project_uuid=project.uuid)
@@ -341,7 +341,7 @@ async def _workflow() -> None:
async def test_get_project_id_from_node_id(
- pg_engine: Engine,
+ aiopg_engine: Engine,
connection: SAConnection,
projects_nodes_repo: ProjectNodesRepo,
registered_user: RowProxy,
@@ -351,7 +351,7 @@ async def test_get_project_id_from_node_id(
NUM_NODES = 11
async def _workflow() -> dict[uuid.UUID, list[uuid.UUID]]:
- async with pg_engine.acquire() as connection:
+ async with aiopg_engine.acquire() as connection:
project = await create_fake_project(connection, registered_user)
projects_nodes_repo = ProjectNodesRepo(project_uuid=project.uuid)
@@ -379,7 +379,7 @@ async def _workflow() -> dict[uuid.UUID, list[uuid.UUID]]:
async def test_get_project_id_from_node_id_raises_for_invalid_node_id(
- pg_engine: Engine,
+ aiopg_engine: Engine,
connection: SAConnection,
projects_nodes_repo: ProjectNodesRepo,
faker: Faker,
@@ -393,7 +393,7 @@ async def test_get_project_id_from_node_id_raises_for_invalid_node_id(
async def test_get_project_id_from_node_id_raises_if_multiple_projects_with_same_node_id_exist(
- pg_engine: Engine,
+ aiopg_engine: Engine,
connection: SAConnection,
projects_nodes_repo: ProjectNodesRepo,
registered_user: RowProxy,
diff --git a/packages/postgres-database/tests/test_utils_tags.py b/packages/postgres-database/tests/test_utils_tags.py
index 2b99c1939fe6..21ac92749a39 100644
--- a/packages/postgres-database/tests/test_utils_tags.py
+++ b/packages/postgres-database/tests/test_utils_tags.py
@@ -30,6 +30,7 @@
set_tag_access_rights_stmt,
update_tag_stmt,
)
+from sqlalchemy.ext.asyncio import AsyncEngine
@pytest.fixture
@@ -75,7 +76,11 @@ async def other_user(
async def test_tags_access_with_primary_groups(
- connection: SAConnection, user: RowProxy, group: RowProxy, other_user: RowProxy
+ asyncpg_engine: AsyncEngine,
+ connection: SAConnection,
+ user: RowProxy,
+ group: RowProxy,
+ other_user: RowProxy,
):
conn = connection
@@ -102,22 +107,29 @@ async def test_tags_access_with_primary_groups(
),
]
- tags_repo = TagsRepo(user_id=user.id)
+ tags_repo = TagsRepo(asyncpg_engine)
# repo has access
assert (
- await tags_repo.access_count(conn, tag_id, read=True, write=True, delete=True)
+ await tags_repo.access_count(
+ user_id=user.id, tag_id=tag_id, read=True, write=True, delete=True
+ )
+ == 1
+ )
+ assert (
+ await tags_repo.access_count(
+ user_id=user.id, tag_id=tag_id, read=True, write=True
+ )
== 1
)
- assert await tags_repo.access_count(conn, tag_id, read=True, write=True) == 1
- assert await tags_repo.access_count(conn, tag_id, read=True) == 1
- assert await tags_repo.access_count(conn, tag_id, write=True) == 1
+ assert await tags_repo.access_count(user_id=user.id, tag_id=tag_id, read=True) == 1
+ assert await tags_repo.access_count(user_id=user.id, tag_id=tag_id, write=True) == 1
# changing access conditions
assert (
await tags_repo.access_count(
- conn,
- tag_id,
+ user_id=user.id,
+ tag_id=tag_id,
read=True,
write=True,
delete=False, # <---
@@ -128,15 +140,20 @@ async def test_tags_access_with_primary_groups(
# user will have NO access to other user's tags even matching access rights
assert (
await tags_repo.access_count(
- conn, other_tag_id, read=True, write=True, delete=True
+ user_id=user.id, tag_id=other_tag_id, read=True, write=True, delete=True
)
== 0
)
async def test_tags_access_with_multiple_groups(
- connection: SAConnection, user: RowProxy, group: RowProxy, other_user: RowProxy
+ asyncpg_engine: AsyncEngine,
+ connection: SAConnection,
+ user: RowProxy,
+ group: RowProxy,
+ other_user: RowProxy,
):
+
conn = connection
(tag_id, other_tag_id, group_tag_id, everyone_tag_id) = [
@@ -182,30 +199,58 @@ async def test_tags_access_with_multiple_groups(
),
]
- tags_repo = TagsRepo(user_id=user.id)
- other_repo = TagsRepo(user_id=other_user.id)
+ tags_repo = TagsRepo(asyncpg_engine)
+ other_repo = TagsRepo(asyncpg_engine)
# tag_id
assert (
- await tags_repo.access_count(conn, tag_id, read=True, write=True, delete=True)
+ await tags_repo.access_count(
+ user_id=user.id, tag_id=tag_id, read=True, write=True, delete=True
+ )
== 1
)
assert (
- await other_repo.access_count(conn, tag_id, read=True, write=True, delete=True)
+ await other_repo.access_count(
+ user_id=other_user.id, tag_id=tag_id, read=True, write=True, delete=True
+ )
== 0
)
# other_tag_id
- assert await tags_repo.access_count(conn, other_tag_id, read=True) == 0
- assert await other_repo.access_count(conn, other_tag_id, read=True) == 1
+ assert (
+ await tags_repo.access_count(user_id=user.id, tag_id=other_tag_id, read=True)
+ == 0
+ )
+ assert (
+ await other_repo.access_count(
+ user_id=other_user.id, tag_id=other_tag_id, read=True
+ )
+ == 1
+ )
# group_tag_id
- assert await tags_repo.access_count(conn, group_tag_id, read=True) == 1
- assert await other_repo.access_count(conn, group_tag_id, read=True) == 0
+ assert (
+ await tags_repo.access_count(user_id=user.id, tag_id=group_tag_id, read=True)
+ == 1
+ )
+ assert (
+ await other_repo.access_count(
+ user_id=other_user.id, tag_id=group_tag_id, read=True
+ )
+ == 0
+ )
# everyone_tag_id
- assert await tags_repo.access_count(conn, everyone_tag_id, read=True) == 1
- assert await other_repo.access_count(conn, everyone_tag_id, read=True) == 1
+ assert (
+ await tags_repo.access_count(user_id=user.id, tag_id=everyone_tag_id, read=True)
+ == 1
+ )
+ assert (
+ await other_repo.access_count(
+ user_id=other_user.id, tag_id=everyone_tag_id, read=True
+ )
+ == 1
+ )
# now group adds read for all tags
for t in (tag_id, other_tag_id, everyone_tag_id):
@@ -218,19 +263,29 @@ async def test_tags_access_with_multiple_groups(
delete=False,
)
- assert await tags_repo.access_count(conn, tag_id, read=True) == 2
- assert await tags_repo.access_count(conn, other_tag_id, read=True) == 1
- assert await tags_repo.access_count(conn, everyone_tag_id, read=True) == 2
+ assert await tags_repo.access_count(user_id=user.id, tag_id=tag_id, read=True) == 2
+ assert (
+ await tags_repo.access_count(user_id=user.id, tag_id=other_tag_id, read=True)
+ == 1
+ )
+ assert (
+ await tags_repo.access_count(user_id=user.id, tag_id=everyone_tag_id, read=True)
+ == 2
+ )
async def test_tags_repo_list_and_get(
- connection: SAConnection, user: RowProxy, group: RowProxy, other_user: RowProxy
+ asyncpg_engine: AsyncEngine,
+ connection: SAConnection,
+ user: RowProxy,
+ group: RowProxy,
+ other_user: RowProxy,
):
conn = connection
- tags_repo = TagsRepo(user_id=user.id)
+ tags_repo = TagsRepo(asyncpg_engine)
# (1) no tags
- listed_tags = await tags_repo.list_all(conn)
+ listed_tags = await tags_repo.list_all(user_id=user.id)
assert not listed_tags
# (2) one tag
@@ -247,7 +302,7 @@ async def test_tags_repo_list_and_get(
)
]
- listed_tags = await tags_repo.list_all(conn)
+ listed_tags = await tags_repo.list_all(user_id=user.id)
assert listed_tags
assert [t["id"] for t in listed_tags] == expected_tags_ids
@@ -265,7 +320,7 @@ async def test_tags_repo_list_and_get(
)
)
- listed_tags = await tags_repo.list_all(conn)
+ listed_tags = await tags_repo.list_all(user_id=user.id)
assert {t["id"] for t in listed_tags} == set(expected_tags_ids)
# (4) add another tag from a differnt user
@@ -282,7 +337,7 @@ async def test_tags_repo_list_and_get(
# same as before
prev_listed_tags = listed_tags
- listed_tags = await tags_repo.list_all(conn)
+ listed_tags = await tags_repo.list_all(user_id=user.id)
assert listed_tags == prev_listed_tags
# (5) add a global tag
@@ -297,7 +352,7 @@ async def test_tags_repo_list_and_get(
delete=False,
)
- listed_tags = await tags_repo.list_all(conn)
+ listed_tags = await tags_repo.list_all(user_id=user.id)
assert listed_tags == [
{
"id": 1,
@@ -328,8 +383,8 @@ async def test_tags_repo_list_and_get(
},
]
- other_repo = TagsRepo(user_id=other_user.id)
- assert await other_repo.list_all(conn) == [
+ other_repo = TagsRepo(asyncpg_engine)
+ assert await other_repo.list_all(user_id=other_user.id) == [
{
"id": 3,
"name": "T3",
@@ -351,7 +406,7 @@ async def test_tags_repo_list_and_get(
]
# exclusive to user
- assert await tags_repo.get(conn, tag_id=2) == {
+ assert await tags_repo.get(user_id=user.id, tag_id=2) == {
"id": 2,
"name": "T2",
"description": "tag via std group",
@@ -363,9 +418,9 @@ async def test_tags_repo_list_and_get(
# exclusive ot other user
with pytest.raises(TagNotFoundError):
- assert await tags_repo.get(conn, tag_id=3)
+ assert await tags_repo.get(user_id=user.id, tag_id=3)
- assert await other_repo.get(conn, tag_id=3) == {
+ assert await other_repo.get(user_id=other_user.id, tag_id=3) == {
"id": 3,
"name": "T3",
"description": "tag for 2",
@@ -376,14 +431,20 @@ async def test_tags_repo_list_and_get(
}
# a common tag
- assert await tags_repo.get(conn, tag_id=4) == await other_repo.get(conn, tag_id=4)
+ assert await tags_repo.get(user_id=user.id, tag_id=4) == await other_repo.get(
+ user_id=user.id, tag_id=4
+ )
async def test_tags_repo_update(
- connection: SAConnection, user: RowProxy, group: RowProxy, other_user: RowProxy
+ asyncpg_engine: AsyncEngine,
+ connection: SAConnection,
+ user: RowProxy,
+ group: RowProxy,
+ other_user: RowProxy,
):
conn = connection
- tags_repo = TagsRepo(user_id=user.id)
+ tags_repo = TagsRepo(asyncpg_engine)
# Tags with different access rights
readonly_tid, readwrite_tid, other_tid = [
@@ -420,10 +481,12 @@ async def test_tags_repo_update(
]
with pytest.raises(TagOperationNotAllowedError):
- await tags_repo.update(conn, tag_id=readonly_tid, description="modified")
+ await tags_repo.update(
+ user_id=user.id, tag_id=readonly_tid, description="modified"
+ )
assert await tags_repo.update(
- conn, tag_id=readwrite_tid, description="modified"
+ user_id=user.id, tag_id=readwrite_tid, description="modified"
) == {
"id": readwrite_tid,
"name": "T2",
@@ -435,14 +498,20 @@ async def test_tags_repo_update(
}
with pytest.raises(TagOperationNotAllowedError):
- await tags_repo.update(conn, tag_id=other_tid, description="modified")
+ await tags_repo.update(
+ user_id=user.id, tag_id=other_tid, description="modified"
+ )
async def test_tags_repo_delete(
- connection: SAConnection, user: RowProxy, group: RowProxy, other_user: RowProxy
+ asyncpg_engine: AsyncEngine,
+ connection: SAConnection,
+ user: RowProxy,
+ group: RowProxy,
+ other_user: RowProxy,
):
conn = connection
- tags_repo = TagsRepo(user_id=user.id)
+ tags_repo = TagsRepo(asyncpg_engine)
# Tags with different access rights
readonly_tid, delete_tid, other_tid = [
@@ -480,28 +549,32 @@ async def test_tags_repo_delete(
# cannot delete
with pytest.raises(TagOperationNotAllowedError):
- await tags_repo.delete(conn, tag_id=readonly_tid)
+ await tags_repo.delete(user_id=user.id, tag_id=readonly_tid)
# can delete
- await tags_repo.get(conn, tag_id=delete_tid)
- await tags_repo.delete(conn, tag_id=delete_tid)
+ await tags_repo.get(user_id=user.id, tag_id=delete_tid)
+ await tags_repo.delete(user_id=user.id, tag_id=delete_tid)
with pytest.raises(TagNotFoundError):
- await tags_repo.get(conn, tag_id=delete_tid)
+ await tags_repo.get(user_id=user.id, tag_id=delete_tid)
# cannot delete
with pytest.raises(TagOperationNotAllowedError):
- await tags_repo.delete(conn, tag_id=other_tid)
+ await tags_repo.delete(user_id=user.id, tag_id=other_tid)
async def test_tags_repo_create(
- connection: SAConnection, user: RowProxy, group: RowProxy, other_user: RowProxy
+ asyncpg_engine: AsyncEngine,
+ connection: SAConnection,
+ user: RowProxy,
+ group: RowProxy,
+ other_user: RowProxy,
):
conn = connection
- tags_repo = TagsRepo(user_id=user.id)
+ tags_repo = TagsRepo(asyncpg_engine)
tag_1 = await tags_repo.create(
- conn,
+ user_id=user.id,
name="T1",
description="my first tag",
color="pink",
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py
index bc415ab31618..096b392fce66 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py
@@ -235,7 +235,6 @@ def random_product(
license_url=fake.url(),
invitation_url=fake.url(),
invitation_form=fake.boolean(),
- has_landing_page=fake.boolean(),
address=fake.address().replace("\n", ". "),
),
"registration_email_template": registration_email_template,
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_workspaces.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_workspaces.py
index 3d1f33ab0291..10a916d218ad 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_workspaces.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_workspaces.py
@@ -5,7 +5,7 @@
from simcore_postgres_database.models.workspaces_access_rights import (
workspaces_access_rights,
)
-from simcore_service_webserver.db.plugin import get_database_engine
+from simcore_service_webserver.db.plugin import get_aiopg_engine
from sqlalchemy.dialects.postgresql import insert as pg_insert
@@ -18,7 +18,7 @@ async def update_or_insert_workspace_group(
write: bool,
delete: bool,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
insert_stmt = pg_insert(workspaces_access_rights).values(
workspace_id=workspace_id,
gid=group_id,
diff --git a/packages/service-library/src/servicelib/aiohttp/application_keys.py b/packages/service-library/src/servicelib/aiohttp/application_keys.py
index f98423bad05c..3958c860cb00 100644
--- a/packages/service-library/src/servicelib/aiohttp/application_keys.py
+++ b/packages/service-library/src/servicelib/aiohttp/application_keys.py
@@ -21,7 +21,7 @@
APP_CONFIG_KEY: Final[str] = f"{__name__ }.config"
APP_SETTINGS_KEY: Final[str] = f"{__name__ }.settings"
-APP_DB_ENGINE_KEY: Final[str] = f"{__name__ }.db_engine"
+APP_AIOPG_ENGINE_KEY: Final[str] = f"{__name__ }.aiopg_engine"
APP_CLIENT_SESSION_KEY: Final[str] = f"{__name__ }.session"
diff --git a/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py b/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py
new file mode 100644
index 000000000000..2ca9d431075b
--- /dev/null
+++ b/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py
@@ -0,0 +1,74 @@
+"""
+Helpers for asyncpg specific to aiohttp
+
+SEE migration aiopg->asyncpg https://github.com/ITISFoundation/osparc-simcore/issues/4529
+"""
+
+
+import logging
+from typing import Final
+
+from aiohttp import web
+from servicelib.logging_utils import log_context
+from settings_library.postgres import PostgresSettings
+from simcore_postgres_database.utils_aiosqlalchemy import ( # type: ignore[import-not-found] # this one is unclear
+ get_pg_engine_stateinfo,
+)
+from sqlalchemy.ext.asyncio import AsyncEngine
+
+from ..db_asyncpg_utils import create_async_engine_and_pg_database_ready
+from ..logging_utils import log_context
+
+APP_DB_ASYNC_ENGINE_KEY: Final[str] = f"{__name__ }.AsyncEngine"
+
+
+_logger = logging.getLogger(__name__)
+
+
+def _set_async_engine_to_app_state(app: web.Application, engine: AsyncEngine):
+ if exists := app.get(APP_DB_ASYNC_ENGINE_KEY, None):
+ msg = f"An instance of {type(exists)} already in app[{APP_DB_ASYNC_ENGINE_KEY}]={exists}"
+ raise ValueError(msg)
+
+ app[APP_DB_ASYNC_ENGINE_KEY] = engine
+ return get_async_engine(app)
+
+
+def get_async_engine(app: web.Application) -> AsyncEngine:
+ engine: AsyncEngine = app[APP_DB_ASYNC_ENGINE_KEY]
+ assert engine # nosec
+ return engine
+
+
+async def connect_to_db(app: web.Application, settings: PostgresSettings) -> None:
+ """
+ - db services up, data migrated and ready to use
+ - sets an engine in app state (use `get_async_engine(app)` to retrieve)
+ """
+ if settings.POSTGRES_CLIENT_NAME:
+ settings = settings.copy(
+ update={"POSTGRES_CLIENT_NAME": settings.POSTGRES_CLIENT_NAME + "-asyncpg"}
+ )
+
+ with log_context(
+ _logger,
+ logging.INFO,
+ "Connecting app[APP_DB_ASYNC_ENGINE_KEY] to postgres with %s",
+ f"{settings=}",
+ ):
+ engine = await create_async_engine_and_pg_database_ready(settings)
+ _set_async_engine_to_app_state(app, engine)
+
+ _logger.info(
+ "app[APP_DB_ASYNC_ENGINE_KEY] ready : %s",
+ await get_pg_engine_stateinfo(engine),
+ )
+
+
+async def close_db_connection(app: web.Application) -> None:
+ engine = get_async_engine(app)
+ with log_context(
+ _logger, logging.DEBUG, f"app[APP_DB_ASYNC_ENGINE_KEY] disconnect of {engine}"
+ ):
+ if engine:
+ await engine.dispose()
diff --git a/packages/service-library/src/servicelib/db_async_engine.py b/packages/service-library/src/servicelib/db_async_engine.py
deleted file mode 100644
index cff73e770475..000000000000
--- a/packages/service-library/src/servicelib/db_async_engine.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import logging
-
-from fastapi import FastAPI
-from settings_library.postgres import PostgresSettings
-from simcore_postgres_database.utils_aiosqlalchemy import ( # type: ignore[import-not-found] # this on is unclear
- get_pg_engine_stateinfo,
- raise_if_migration_not_ready,
-)
-from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
-from tenacity import retry
-
-from .logging_utils import log_context
-from .retry_policies import PostgresRetryPolicyUponInitialization
-
-_logger = logging.getLogger(__name__)
-
-
-@retry(**PostgresRetryPolicyUponInitialization(_logger).kwargs)
-async def connect_to_db(app: FastAPI, settings: PostgresSettings) -> None:
- with log_context(
- _logger, logging.DEBUG, f"connection to db {settings.dsn_with_async_sqlalchemy}"
- ):
- engine: AsyncEngine = create_async_engine(
- settings.dsn_with_async_sqlalchemy,
- pool_size=settings.POSTGRES_MINSIZE,
- max_overflow=settings.POSTGRES_MAXSIZE - settings.POSTGRES_MINSIZE,
- connect_args={
- "server_settings": {"application_name": settings.POSTGRES_CLIENT_NAME}
- },
- pool_pre_ping=True, # https://docs.sqlalchemy.org/en/14/core/pooling.html#dealing-with-disconnects
- future=True, # this uses sqlalchemy 2.0 API, shall be removed when sqlalchemy 2.0 is released
- )
-
- with log_context(_logger, logging.DEBUG, "migration"):
- try:
- await raise_if_migration_not_ready(engine)
- except Exception:
- # NOTE: engine must be closed because retry will create a new engine
- await engine.dispose()
- raise
-
- app.state.engine = engine
- _logger.debug(
- "Setup engine: %s",
- await get_pg_engine_stateinfo(engine),
- )
-
-
-async def close_db_connection(app: FastAPI) -> None:
- with log_context(_logger, logging.DEBUG, f"db disconnect of {app.state.engine}"):
- if engine := app.state.engine:
- await engine.dispose()
diff --git a/packages/service-library/src/servicelib/db_asyncpg_utils.py b/packages/service-library/src/servicelib/db_asyncpg_utils.py
new file mode 100644
index 000000000000..e2ae14cc2651
--- /dev/null
+++ b/packages/service-library/src/servicelib/db_asyncpg_utils.py
@@ -0,0 +1,59 @@
+import logging
+import time
+from datetime import timedelta
+
+from models_library.healthchecks import IsNonResponsive, IsResponsive, LivenessResult
+from settings_library.postgres import PostgresSettings
+from simcore_postgres_database.utils_aiosqlalchemy import ( # type: ignore[import-not-found] # this one is unclear
+ raise_if_migration_not_ready,
+)
+from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
+from tenacity import retry
+
+from .retry_policies import PostgresRetryPolicyUponInitialization
+
+_logger = logging.getLogger(__name__)
+
+
+@retry(**PostgresRetryPolicyUponInitialization(_logger).kwargs)
+async def create_async_engine_and_pg_database_ready(
+ settings: PostgresSettings,
+) -> AsyncEngine:
+ """
+ - creates asyncio engine
+ - waits until db service is up
+ - waits until db data is migrated (i.e. ready to use)
+ - returns engine
+ """
+ engine: AsyncEngine = create_async_engine(
+ settings.dsn_with_async_sqlalchemy,
+ pool_size=settings.POSTGRES_MINSIZE,
+ max_overflow=settings.POSTGRES_MAXSIZE - settings.POSTGRES_MINSIZE,
+ connect_args={
+ "server_settings": {"application_name": settings.POSTGRES_CLIENT_NAME}
+ },
+ pool_pre_ping=True, # https://docs.sqlalchemy.org/en/14/core/pooling.html#dealing-with-disconnects
+ future=True, # this uses sqlalchemy 2.0 API, shall be removed when sqlalchemy 2.0 is released
+ )
+
+ try:
+ await raise_if_migration_not_ready(engine)
+ except Exception:
+ # NOTE: engine must be closed because retry will create a new engine
+ await engine.dispose()
+ raise
+
+ return engine
+
+
+async def check_postgres_liveness(engine: AsyncEngine) -> LivenessResult:
+ try:
+ tic = time.time()
+ # test
+ async with engine.connect():
+ ...
+ elapsed_time = time.time() - tic
+ return IsResponsive(elapsed=timedelta(seconds=elapsed_time))
+ except SQLAlchemyError as err:
+ return IsNonResponsive(reason=f"{err}")
diff --git a/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py b/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py
new file mode 100644
index 000000000000..a45e5dc2145d
--- /dev/null
+++ b/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py
@@ -0,0 +1,33 @@
+import logging
+
+from fastapi import FastAPI
+from settings_library.postgres import PostgresSettings
+from simcore_postgres_database.utils_aiosqlalchemy import ( # type: ignore[import-not-found] # this one is unclear
+ get_pg_engine_stateinfo,
+)
+
+from ..db_asyncpg_utils import create_async_engine_and_pg_database_ready
+from ..logging_utils import log_context
+
+_logger = logging.getLogger(__name__)
+
+
+async def connect_to_db(app: FastAPI, settings: PostgresSettings) -> None:
+ with log_context(
+ _logger,
+ logging.DEBUG,
+ f"Connecting and migrating {settings.dsn_with_async_sqlalchemy}",
+ ):
+ engine = await create_async_engine_and_pg_database_ready(settings)
+
+ app.state.engine = engine
+ _logger.debug(
+ "Setup engine: %s",
+ await get_pg_engine_stateinfo(engine),
+ )
+
+
+async def close_db_connection(app: FastAPI) -> None:
+ with log_context(_logger, logging.DEBUG, f"db disconnect of {app.state.engine}"):
+ if engine := app.state.engine:
+ await engine.dispose()
diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py
index f8335bbeed2b..7724aba99dc4 100644
--- a/packages/settings-library/src/settings_library/postgres.py
+++ b/packages/settings-library/src/settings_library/postgres.py
@@ -1,6 +1,6 @@
-import urllib.parse
from functools import cached_property
from typing import Any, ClassVar
+from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
from pydantic import Field, PostgresDsn, SecretStr, validator
@@ -75,11 +75,23 @@ def dsn_with_async_sqlalchemy(self) -> str:
def dsn_with_query(self) -> str:
"""Some clients do not support queries in the dsn"""
dsn = self.dsn
+ return self._update_query(dsn)
+
+ def _update_query(self, uri: str) -> str:
+ # SEE https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS
+ new_params: dict[str, str] = {}
if self.POSTGRES_CLIENT_NAME:
- dsn += "?" + urllib.parse.urlencode(
- {"application_name": self.POSTGRES_CLIENT_NAME}
- )
- return dsn
+ new_params = {
+ "application_name": self.POSTGRES_CLIENT_NAME,
+ }
+
+ if new_params:
+ parsed_uri = urlparse(uri)
+ query = dict(parse_qsl(parsed_uri.query))
+ query.update(new_params)
+ updated_query = urlencode(query)
+ return urlunparse(parsed_uri._replace(query=updated_query))
+ return uri
class Config(BaseCustomSettings.Config):
schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc]
diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py
index 8cf3eadc30ff..fddad2f62122 100644
--- a/packages/settings-library/tests/test__pydantic_settings.py
+++ b/packages/settings-library/tests/test__pydantic_settings.py
@@ -15,6 +15,7 @@
from pydantic import BaseSettings, validator
from pydantic.fields import ModelField, Undefined
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
def assert_field_specs(
@@ -50,9 +51,8 @@ class Settings(BaseSettings):
@classmethod
def parse_none(cls, v, values, field: ModelField):
# WARNING: In nullable fields, envs equal to null or none are parsed as None !!
- if field.allow_none:
- if isinstance(v, str) and v.lower() in ("null", "none"):
- return None
+ if field.allow_none and isinstance(v, str) and v.lower() in ("null", "none"):
+ return None
return v
@@ -132,15 +132,21 @@ def test_fields_declarations():
def test_construct(monkeypatch):
# from __init__
settings_from_init = Settings(
- VALUE=1, VALUE_ALSO_REQUIRED=10, VALUE_NULLABLE_REQUIRED=None
+ VALUE=1,
+ VALUE_ALSO_REQUIRED=10,
+ VALUE_NULLABLE_REQUIRED=None,
)
+
print(settings_from_init.json(exclude_unset=True, indent=1))
# from env vars
- monkeypatch.setenv("VALUE", "1")
- monkeypatch.setenv("VALUE_ALSO_REQUIRED", "10")
- monkeypatch.setenv(
- "VALUE_NULLABLE_REQUIRED", "null"
+ setenvs_from_dict(
+ monkeypatch,
+ {
+ "VALUE": "1",
+ "VALUE_ALSO_REQUIRED": "10",
+ "VALUE_NULLABLE_REQUIRED": "null",
+ },
) # WARNING: set this env to None would not work w/o ``parse_none`` validator! bug???
settings_from_env = Settings()
diff --git a/packages/settings-library/tests/test_postgres.py b/packages/settings-library/tests/test_postgres.py
index 1708acc78081..21f334c951c3 100644
--- a/packages/settings-library/tests/test_postgres.py
+++ b/packages/settings-library/tests/test_postgres.py
@@ -3,7 +3,11 @@
# pylint: disable=unused-variable
+from urllib.parse import urlparse
+
import pytest
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
+from pytest_simcore.helpers.typing_env import EnvVarsDict
from settings_library.postgres import PostgresSettings
@@ -12,9 +16,16 @@ def env_file():
return ".env-sample"
-def test_cached_property_dsn(mock_environment: dict):
+@pytest.fixture
+def mock_environment(mock_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch):
+ return mock_environment | setenvs_from_dict(
+ monkeypatch, {"POSTGRES_CLIENT_NAME": "Some &43 funky name"}
+ )
- settings = PostgresSettings()
+
+def test_cached_property_dsn(mock_environment: EnvVarsDict):
+
+ settings = PostgresSettings.create_from_envs()
# all are upper-case
assert all(key == key.upper() for key in settings.dict())
@@ -28,20 +39,30 @@ def test_cached_property_dsn(mock_environment: dict):
assert "dsn" in settings.dict()
-def test_dsn_with_query(mock_environment: dict, monkeypatch):
-
+def test_dsn_with_query(mock_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch):
settings = PostgresSettings()
- assert not settings.POSTGRES_CLIENT_NAME
+ assert settings.POSTGRES_CLIENT_NAME
assert settings.dsn == "postgresql://foo:secret@localhost:5432/foodb"
-
- # now with app
- monkeypatch.setenv("POSTGRES_CLIENT_NAME", "Some &43 funky name")
-
- settings_with_app = PostgresSettings()
-
- assert settings_with_app.POSTGRES_CLIENT_NAME
assert (
- settings_with_app.dsn_with_query
+ settings.dsn_with_query
== "postgresql://foo:secret@localhost:5432/foodb?application_name=Some+%2643+funky+name"
)
+
+ with monkeypatch.context() as patch:
+ patch.delenv("POSTGRES_CLIENT_NAME")
+ settings = PostgresSettings()
+
+ assert not settings.POSTGRES_CLIENT_NAME
+ assert settings.dsn == settings.dsn_with_query
+
+
+def test_dsn_with_async_sqlalchemy_has_query(
+ mock_environment: EnvVarsDict, monkeypatch
+):
+ settings = PostgresSettings()
+
+ parsed_url = urlparse(settings.dsn_with_async_sqlalchemy)
+ assert parsed_url.scheme.split("+") == ("postgresql", "asyncpg")
+
+ assert not parsed_url.query
diff --git a/services/catalog/src/simcore_service_catalog/core/events.py b/services/catalog/src/simcore_service_catalog/core/events.py
index fb2329019b52..f22adbba4ece 100644
--- a/services/catalog/src/simcore_service_catalog/core/events.py
+++ b/services/catalog/src/simcore_service_catalog/core/events.py
@@ -3,7 +3,7 @@
from typing import TypeAlias
from fastapi import FastAPI
-from servicelib.db_async_engine import close_db_connection, connect_to_db
+from servicelib.fastapi.db_asyncpg_engine import close_db_connection, connect_to_db
from servicelib.logging_utils import log_context
from .._meta import APP_FINISHED_BANNER_MSG, APP_STARTED_BANNER_MSG
diff --git a/services/payments/src/simcore_service_payments/services/healthchecks.py b/services/payments/src/simcore_service_payments/services/healthchecks.py
index 98774700f44b..be6344c00ef7 100644
--- a/services/payments/src/simcore_service_payments/services/healthchecks.py
+++ b/services/payments/src/simcore_service_payments/services/healthchecks.py
@@ -2,10 +2,10 @@
import logging
from models_library.healthchecks import LivenessResult
+from servicelib.db_asyncpg_utils import check_postgres_liveness
from sqlalchemy.ext.asyncio import AsyncEngine
from .payments_gateway import PaymentsGatewayApi
-from .postgres import check_postgres_liveness
from .resource_usage_tracker import ResourceUsageTrackerApi
_logger = logging.getLogger(__name__)
diff --git a/services/payments/src/simcore_service_payments/services/postgres.py b/services/payments/src/simcore_service_payments/services/postgres.py
index ba68eae0fac4..fd84fba45ce7 100644
--- a/services/payments/src/simcore_service_payments/services/postgres.py
+++ b/services/payments/src/simcore_service_payments/services/postgres.py
@@ -1,10 +1,5 @@
-import time
-from datetime import timedelta
-
from fastapi import FastAPI
-from models_library.healthchecks import IsNonResponsive, IsResponsive, LivenessResult
-from servicelib.db_async_engine import close_db_connection, connect_to_db
-from sqlalchemy.exc import SQLAlchemyError
+from servicelib.fastapi.db_asyncpg_engine import close_db_connection, connect_to_db
from sqlalchemy.ext.asyncio import AsyncEngine
from ..core.settings import ApplicationSettings
@@ -16,18 +11,6 @@ def get_engine(app: FastAPI) -> AsyncEngine:
return engine
-async def check_postgres_liveness(engine: AsyncEngine) -> LivenessResult:
- try:
- tic = time.time()
- # test
- async with engine.connect():
- ...
- elapsed_time = time.time() - tic
- return IsResponsive(elapsed=timedelta(seconds=elapsed_time))
- except SQLAlchemyError as err:
- return IsNonResponsive(reason=f"{err}")
-
-
def setup_postgres(app: FastAPI):
app.state.engine = None
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/__init__.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/__init__.py
index bca3083383ce..42062cb30ba6 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/__init__.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/__init__.py
@@ -1,5 +1,5 @@
from fastapi import FastAPI
-from servicelib.db_async_engine import close_db_connection, connect_to_db
+from servicelib.fastapi.db_asyncpg_engine import close_db_connection, connect_to_db
def setup(app: FastAPI):
diff --git a/services/storage/src/simcore_service_storage/constants.py b/services/storage/src/simcore_service_storage/constants.py
index f9c6a36f5c26..24ba188f4d27 100644
--- a/services/storage/src/simcore_service_storage/constants.py
+++ b/services/storage/src/simcore_service_storage/constants.py
@@ -39,7 +39,7 @@
MAX_CONCURRENT_REST_CALLS: Final[int] = 10
# DATABASE ----------------------------
-APP_DB_ENGINE_KEY = f"{__name__}.db_engine"
+APP_AIOPG_ENGINE_KEY = f"{__name__}.db_engine"
MAX_CONCURRENT_DB_TASKS: Final[int] = 2
# DATA STORAGE MANAGER ----------------------------------
diff --git a/services/storage/src/simcore_service_storage/db.py b/services/storage/src/simcore_service_storage/db.py
index dd00a861b447..a4063f40e2d3 100644
--- a/services/storage/src/simcore_service_storage/db.py
+++ b/services/storage/src/simcore_service_storage/db.py
@@ -13,7 +13,7 @@
)
from tenacity import retry
-from .constants import APP_CONFIG_KEY, APP_DB_ENGINE_KEY
+from .constants import APP_AIOPG_ENGINE_KEY, APP_CONFIG_KEY
_logger = logging.getLogger(__name__)
@@ -46,7 +46,7 @@ async def postgres_cleanup_ctx(app: web.Application):
) as engine:
assert engine # nosec
- app[APP_DB_ENGINE_KEY] = engine
+ app[APP_AIOPG_ENGINE_KEY] = engine
_logger.info("Created pg engine for %s", dsn)
yield # ----------
_logger.info("Deleting pg engine for %s", dsn)
@@ -55,11 +55,11 @@ async def postgres_cleanup_ctx(app: web.Application):
async def is_service_responsive(app: web.Application) -> bool:
"""Returns true if the app can connect to db service"""
- return await is_pg_responsive(engine=app[APP_DB_ENGINE_KEY])
+ return await is_pg_responsive(engine=app[APP_AIOPG_ENGINE_KEY])
def get_engine_state(app: web.Application) -> dict[str, Any]:
- engine: Engine | None = app.get(APP_DB_ENGINE_KEY)
+ engine: Engine | None = app.get(APP_AIOPG_ENGINE_KEY)
if engine:
engine_info: dict[str, Any] = get_pg_engine_stateinfo(engine)
return engine_info
@@ -67,7 +67,7 @@ def get_engine_state(app: web.Application) -> dict[str, Any]:
def setup_db(app: web.Application):
- app[APP_DB_ENGINE_KEY] = None
+ app[APP_AIOPG_ENGINE_KEY] = None
# app is created at this point but not yet started
_logger.debug("Setting up %s [service: %s] ...", __name__, "postgres")
diff --git a/services/storage/src/simcore_service_storage/db_tokens.py b/services/storage/src/simcore_service_storage/db_tokens.py
index 456d90366f9a..445a7c220d11 100644
--- a/services/storage/src/simcore_service_storage/db_tokens.py
+++ b/services/storage/src/simcore_service_storage/db_tokens.py
@@ -7,7 +7,7 @@
from models_library.users import UserID
from simcore_postgres_database.storage_models import tokens
-from .constants import APP_CONFIG_KEY, APP_DB_ENGINE_KEY
+from .constants import APP_AIOPG_ENGINE_KEY, APP_CONFIG_KEY
log = logging.getLogger(__name__)
@@ -27,7 +27,7 @@ async def get_api_token_and_secret(
app: web.Application, user_id: UserID
) -> tuple[str, str]:
# from the client side together with the userid?
- engine = app[APP_DB_ENGINE_KEY]
+ engine = app[APP_AIOPG_ENGINE_KEY]
# defaults from config if any, othewise None
api_token = app[APP_CONFIG_KEY].BF_API_KEY
diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py
index 88d3715c36e1..db5a1ab288b5 100644
--- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py
+++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py
@@ -38,8 +38,8 @@
from . import db_file_meta_data, db_projects, db_tokens
from .constants import (
+ APP_AIOPG_ENGINE_KEY,
APP_CONFIG_KEY,
- APP_DB_ENGINE_KEY,
DATCORE_ID,
EXPAND_DIR_MAX_ITEM_COUNT,
MAX_CONCURRENT_S3_TASKS,
@@ -1084,7 +1084,7 @@ def create_simcore_s3_data_manager(app: web.Application) -> SimcoreS3DataManager
cfg: Settings = app[APP_CONFIG_KEY]
assert cfg.STORAGE_S3 # nosec
return SimcoreS3DataManager(
- engine=app[APP_DB_ENGINE_KEY],
+ engine=app[APP_AIOPG_ENGINE_KEY],
simcore_bucket_name=parse_obj_as(S3BucketName, cfg.STORAGE_S3.S3_BUCKET_NAME),
app=app,
settings=cfg,
diff --git a/services/web/server/src/simcore_service_webserver/_constants.py b/services/web/server/src/simcore_service_webserver/_constants.py
index 91b70f453074..aafa109b47da 100644
--- a/services/web/server/src/simcore_service_webserver/_constants.py
+++ b/services/web/server/src/simcore_service_webserver/_constants.py
@@ -4,8 +4,8 @@
from typing import Final
from servicelib.aiohttp.application_keys import (
+ APP_AIOPG_ENGINE_KEY,
APP_CONFIG_KEY,
- APP_DB_ENGINE_KEY,
APP_FIRE_AND_FORGET_TASKS_KEY,
APP_SETTINGS_KEY,
)
@@ -30,7 +30,7 @@
__all__: tuple[str, ...] = (
"APP_CONFIG_KEY",
- "APP_DB_ENGINE_KEY",
+ "APP_AIOPG_ENGINE_KEY",
"APP_FIRE_AND_FORGET_TASKS_KEY",
"APP_SETTINGS_KEY",
"RQT_USERID_KEY",
diff --git a/services/web/server/src/simcore_service_webserver/api_keys/_db.py b/services/web/server/src/simcore_service_webserver/api_keys/_db.py
index a01a49c9a0b5..0e60051f8958 100644
--- a/services/web/server/src/simcore_service_webserver/api_keys/_db.py
+++ b/services/web/server/src/simcore_service_webserver/api_keys/_db.py
@@ -10,7 +10,7 @@
from models_library.basic_types import IdInt
from models_library.products import ProductName
from models_library.users import UserID
-from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY
+from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
from simcore_postgres_database.models.api_keys import api_keys
from sqlalchemy.dialects.postgresql import insert as pg_insert
@@ -23,7 +23,7 @@ class ApiKeyRepo:
@classmethod
def create_from_app(cls, app: web.Application):
- return cls(engine=app[APP_DB_ENGINE_KEY])
+ return cls(engine=app[APP_AIOPG_ENGINE_KEY])
async def list_names(
self, *, user_id: UserID, product_name: ProductName
diff --git a/services/web/server/src/simcore_service_webserver/db/_aiopg.py b/services/web/server/src/simcore_service_webserver/db/_aiopg.py
new file mode 100644
index 000000000000..f6944e5ef679
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/db/_aiopg.py
@@ -0,0 +1,105 @@
+"""
+Helpers on aiopg
+
+SEE migration aiopg->asyncpg https://github.com/ITISFoundation/osparc-simcore/issues/4529
+"""
+
+import logging
+from collections.abc import AsyncIterator
+from typing import Any, cast
+
+from aiohttp import web
+from aiopg.sa import Engine, create_engine
+from models_library.utils.json_serialization import json_dumps
+from servicelib.aiohttp.aiopg_utils import is_pg_responsive
+from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
+from servicelib.logging_utils import log_context
+from servicelib.retry_policies import PostgresRetryPolicyUponInitialization
+from simcore_postgres_database.errors import DBAPIError
+from simcore_postgres_database.utils_aiopg import (
+ DBMigrationError,
+ close_engine,
+ get_pg_engine_stateinfo,
+ raise_if_migration_not_ready,
+)
+from tenacity import retry
+
+from .settings import PostgresSettings, get_plugin_settings
+
+_logger = logging.getLogger(__name__)
+
+
+@retry(**PostgresRetryPolicyUponInitialization(_logger).kwargs)
+async def _ensure_pg_ready(settings: PostgresSettings) -> Engine:
+ engine: Engine = await create_engine(
+ settings.dsn,
+ application_name=settings.POSTGRES_CLIENT_NAME,
+ minsize=settings.POSTGRES_MINSIZE,
+ maxsize=settings.POSTGRES_MAXSIZE,
+ )
+
+ try:
+ await raise_if_migration_not_ready(engine)
+ except (DBMigrationError, DBAPIError):
+ await close_engine(engine)
+ raise
+
+ return engine # tenacity rules guarantee exit with exc
+
+
+async def postgres_cleanup_ctx(app: web.Application) -> AsyncIterator[None]:
+
+ settings = get_plugin_settings(app)
+
+ with log_context(
+ _logger,
+ logging.INFO,
+ "Connecting app[APP_AIOPG_ENGINE_KEY] to postgres with %s",
+ f"{settings=}",
+ ):
+ aiopg_engine = await _ensure_pg_ready(settings)
+ app[APP_AIOPG_ENGINE_KEY] = aiopg_engine
+
+ _logger.info(
+ "app[APP_AIOPG_ENGINE_KEY] created %s",
+ json_dumps(get_engine_state(app), indent=1),
+ )
+
+ yield # -------------------
+
+ if aiopg_engine is not app.get(APP_AIOPG_ENGINE_KEY):
+ _logger.critical(
+ "app[APP_AIOPG_ENGINE_KEY] does not hold right db engine. Somebody has changed it??"
+ )
+
+ await close_engine(aiopg_engine)
+
+ _logger.debug(
+ "app[APP_AIOPG_ENGINE_KEY] after shutdown %s (closed=%s): %s",
+ aiopg_engine.dsn,
+ aiopg_engine.closed,
+ json_dumps(get_engine_state(app), indent=1),
+ )
+
+
+def is_service_enabled(app: web.Application):
+ return app.get(APP_AIOPG_ENGINE_KEY) is not None
+
+
+async def is_service_responsive(app: web.Application):
+ """Returns true if the app can connect to db service"""
+ if not is_service_enabled(app):
+ return False
+ return await is_pg_responsive(engine=app[APP_AIOPG_ENGINE_KEY])
+
+
+def get_engine_state(app: web.Application) -> dict[str, Any]:
+ engine: Engine | None = app.get(APP_AIOPG_ENGINE_KEY)
+ if engine:
+ pg_engine_stateinfo: dict[str, Any] = get_pg_engine_stateinfo(engine)
+ return pg_engine_stateinfo
+ return {}
+
+
+def get_database_engine(app: web.Application) -> Engine:
+ return cast(Engine, app[APP_AIOPG_ENGINE_KEY])
diff --git a/services/web/server/src/simcore_service_webserver/db/_asyncpg.py b/services/web/server/src/simcore_service_webserver/db/_asyncpg.py
new file mode 100644
index 000000000000..03bac23ea2c2
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/db/_asyncpg.py
@@ -0,0 +1,38 @@
+"""
+Helpers on asyncpg
+
+SEE migration aiopg->asyncpg https://github.com/ITISFoundation/osparc-simcore/issues/4529
+"""
+
+import logging
+from collections.abc import AsyncIterator
+
+from aiohttp import web
+from servicelib.aiohttp.db_asyncpg_engine import (
+ close_db_connection,
+ connect_to_db,
+ get_async_engine,
+)
+from sqlalchemy.ext.asyncio import AsyncEngine
+
+from .settings import PostgresSettings, get_plugin_settings
+
+_logger = logging.getLogger(__name__)
+
+
+async def postgres_cleanup_ctx(app: web.Application) -> AsyncIterator[None]:
+ settings: PostgresSettings = get_plugin_settings(app)
+ await connect_to_db(app, settings)
+
+ assert get_async_engine(app) # nosec
+ assert isinstance(get_async_engine(app), AsyncEngine) # nosec
+
+ yield
+
+ await close_db_connection(app)
+
+
+__all__: tuple[str, ...] = (
+ "get_async_engine",
+ "postgres_cleanup_ctx",
+)
diff --git a/services/web/server/src/simcore_service_webserver/db/base_repository.py b/services/web/server/src/simcore_service_webserver/db/base_repository.py
index 0e5902fbc187..96c1ad90e388 100644
--- a/services/web/server/src/simcore_service_webserver/db/base_repository.py
+++ b/services/web/server/src/simcore_service_webserver/db/base_repository.py
@@ -2,7 +2,7 @@
from aiopg.sa.engine import Engine
from models_library.users import UserID
-from .._constants import APP_DB_ENGINE_KEY, RQT_USERID_KEY
+from .._constants import APP_AIOPG_ENGINE_KEY, RQT_USERID_KEY
class BaseRepository:
@@ -15,12 +15,13 @@ def __init__(self, engine: Engine, user_id: UserID | None = None):
@classmethod
def create_from_request(cls, request: web.Request):
return cls(
- engine=request.app[APP_DB_ENGINE_KEY], user_id=request.get(RQT_USERID_KEY)
+ engine=request.app[APP_AIOPG_ENGINE_KEY],
+ user_id=request.get(RQT_USERID_KEY),
)
@classmethod
def create_from_app(cls, app: web.Application):
- return cls(engine=app[APP_DB_ENGINE_KEY], user_id=None)
+ return cls(engine=app[APP_AIOPG_ENGINE_KEY], user_id=None)
@property
def engine(self) -> Engine:
diff --git a/services/web/server/src/simcore_service_webserver/db/plugin.py b/services/web/server/src/simcore_service_webserver/db/plugin.py
index 720637bf9935..4d9acbf70ae0 100644
--- a/services/web/server/src/simcore_service_webserver/db/plugin.py
+++ b/services/web/server/src/simcore_service_webserver/db/plugin.py
@@ -3,97 +3,16 @@
"""
import logging
-from collections.abc import AsyncIterator
-from typing import Any, cast
from aiohttp import web
-from aiopg.sa import Engine, create_engine
-from models_library.utils.json_serialization import json_dumps
-from servicelib.aiohttp.aiopg_utils import is_pg_responsive
-from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY
+from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup
-from servicelib.retry_policies import PostgresRetryPolicyUponInitialization
-from simcore_postgres_database.errors import DBAPIError
-from simcore_postgres_database.utils_aiopg import (
- DBMigrationError,
- close_engine,
- get_pg_engine_stateinfo,
- raise_if_migration_not_ready,
-)
-from tenacity import retry
-from .settings import PostgresSettings, get_plugin_settings
+from . import _aiopg, _asyncpg
_logger = logging.getLogger(__name__)
-@retry(**PostgresRetryPolicyUponInitialization(_logger).kwargs)
-async def _ensure_pg_ready(settings: PostgresSettings) -> Engine:
-
- _logger.info("Connecting to postgres with %s", f"{settings=}")
- engine: Engine = await create_engine(
- settings.dsn,
- application_name=settings.POSTGRES_CLIENT_NAME,
- minsize=settings.POSTGRES_MINSIZE,
- maxsize=settings.POSTGRES_MAXSIZE,
- )
-
- try:
- await raise_if_migration_not_ready(engine)
- except (DBMigrationError, DBAPIError):
- await close_engine(engine)
- raise
-
- _logger.info("Connection to postgres with %s succeeded", f"{settings=}")
- return engine # tenacity rules guarantee exit with exc
-
-
-async def postgres_cleanup_ctx(app: web.Application) -> AsyncIterator[None]:
-
- settings = get_plugin_settings(app)
- aiopg_engine = await _ensure_pg_ready(settings)
- app[APP_DB_ENGINE_KEY] = aiopg_engine
-
- _logger.info("pg engine created %s", json_dumps(get_engine_state(app), indent=1))
-
- yield # -------------------
-
- if aiopg_engine is not app.get(APP_DB_ENGINE_KEY):
- _logger.critical("app does not hold right db engine. Somebody has changed it??")
-
- await close_engine(aiopg_engine)
-
- _logger.debug(
- "pg engine created after shutdown %s (closed=%s): %s",
- aiopg_engine.dsn,
- aiopg_engine.closed,
- json_dumps(get_engine_state(app), indent=1),
- )
-
-
-def is_service_enabled(app: web.Application):
- return app.get(APP_DB_ENGINE_KEY) is not None
-
-
-async def is_service_responsive(app: web.Application):
- """Returns true if the app can connect to db service"""
- if not is_service_enabled(app):
- return False
- return await is_pg_responsive(engine=app[APP_DB_ENGINE_KEY])
-
-
-def get_engine_state(app: web.Application) -> dict[str, Any]:
- engine: Engine | None = app.get(APP_DB_ENGINE_KEY)
- if engine:
- pg_engine_stateinfo: dict[str, Any] = get_pg_engine_stateinfo(engine)
- return pg_engine_stateinfo
- return {}
-
-
-def get_database_engine(app: web.Application) -> Engine:
- return cast(Engine, app[APP_DB_ENGINE_KEY])
-
-
@app_module_setup(
"simcore_service_webserver.db",
ModuleCategory.ADDON,
@@ -103,7 +22,18 @@ def get_database_engine(app: web.Application) -> Engine:
def setup_db(app: web.Application):
# ensures keys exist
- app[APP_DB_ENGINE_KEY] = None
+ app[APP_AIOPG_ENGINE_KEY] = None
# async connection to db
- app.cleanup_ctx.append(postgres_cleanup_ctx)
+ app.cleanup_ctx.append(_aiopg.postgres_cleanup_ctx)
+ app.cleanup_ctx.append(_asyncpg.postgres_cleanup_ctx)
+
+
+# aiopg helpers
+get_aiopg_engine = _aiopg.get_database_engine
+get_aiopg_engine_state = _aiopg.get_engine_state
+is_service_responsive = _aiopg.is_service_responsive
+is_service_enabled = _aiopg.is_service_enabled
+
+# asyncpg helpers
+get_asyncpg_engine = _asyncpg.get_async_engine
diff --git a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py
index eba764a25046..19d5e2e79d4f 100644
--- a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py
+++ b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py
@@ -18,7 +18,7 @@
from models_library.projects_nodes_io import NodeID
from models_library.projects_state import RunningState
from pydantic.types import PositiveInt
-from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY
+from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
from simcore_postgres_database.webserver_models import DB_CHANNEL_NAME, projects
from sqlalchemy.sql import select
@@ -159,7 +159,7 @@ async def _comp_tasks_listening_task(app: web.Application) -> None:
while True:
try:
# create a special connection here
- db_engine = app[APP_DB_ENGINE_KEY]
+ db_engine = app[APP_AIOPG_ENGINE_KEY]
_logger.info("listening to comp_task events...")
await _listen(app, db_engine)
except asyncio.CancelledError: # noqa: PERF203
diff --git a/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py b/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py
index 13154bf5723d..661f4f3e6e02 100644
--- a/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py
@@ -116,7 +116,7 @@ def _get_client_session_info():
async def _check_pg():
check.services["postgres"] = {
"healthy": await plugin.is_service_responsive(request.app),
- "pool": plugin.get_engine_state(request.app),
+ "pool": plugin.get_aiopg_engine_state(request.app),
}
async def _check_storage():
diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py b/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py
index fb80f3aa3fac..b373e4033727 100644
--- a/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py
@@ -27,7 +27,7 @@
from .._constants import RQ_PRODUCT_KEY
from .._meta import API_VTAG as VTAG
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from ..login.decorators import login_required
from ..products import api as products_api
from ..security.decorators import permission_required
@@ -91,7 +91,7 @@ async def start_computation(request: web.Request) -> web.Response:
X_SIMCORE_USER_AGENT, UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
)
- async with get_database_engine(request.app).acquire() as conn:
+ async with get_aiopg_engine(request.app).acquire() as conn:
group_properties = (
await GroupExtraPropertiesRepo.get_aggregated_properties_for_user(
conn, user_id=req_ctx.user_id, product_name=req_ctx.product_name
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
index 5c1dcf4d47f8..facc2eac3c88 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
@@ -22,7 +22,7 @@
from sqlalchemy.orm import aliased
from sqlalchemy.sql import asc, desc, select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .errors import FolderAccessForbiddenError, FolderNotFoundError
_logger = logging.getLogger(__name__)
@@ -54,7 +54,7 @@ async def create(
user_id is not None and workspace_id is not None
), "Both user_id and workspace_id cannot be provided at the same time. Please provide only one."
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
folders_v2.insert()
.values(
@@ -117,7 +117,7 @@ async def list_(
list_query = base_query.order_by(desc(getattr(folders_v2.c, order_by.field)))
list_query = list_query.offset(offset).limit(limit)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
count_result = await conn.execute(count_query)
total_count = await count_result.scalar()
@@ -142,7 +142,7 @@ async def get(
)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(query)
row = await result.first()
if row is None:
@@ -178,7 +178,7 @@ async def get_for_user_or_workspace(
else:
query = query.where(folders_v2.c.workspace_id == workspace_id)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(query)
row = await result.first()
if row is None:
@@ -196,7 +196,7 @@ async def update(
parent_folder_id: FolderID | None,
product_name: ProductName,
) -> FolderDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
folders_v2.update()
.values(
@@ -222,7 +222,7 @@ async def delete_recursively(
folder_id: FolderID,
product_name: ProductName,
) -> None:
- async with get_database_engine(app).acquire() as conn, conn.begin():
+ async with get_aiopg_engine(app).acquire() as conn, conn.begin():
# Step 1: Define the base case for the recursive CTE
base_query = select(
folders_v2.c.folder_id, folders_v2.c.parent_folder_id
@@ -276,7 +276,7 @@ async def get_projects_recursively_only_if_user_is_owner(
or the `users_to_groups` table for private workspace projects.
"""
- async with get_database_engine(app).acquire() as conn, conn.begin():
+ async with get_aiopg_engine(app).acquire() as conn, conn.begin():
# Step 1: Define the base case for the recursive CTE
base_query = select(
folders_v2.c.folder_id, folders_v2.c.parent_folder_id
diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py
index 229932958197..3287887da4af 100644
--- a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py
+++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py
@@ -15,7 +15,7 @@
from tenacity.before_sleep import before_sleep_log
from tenacity.wait import wait_exponential
-from .._constants import APP_DB_ENGINE_KEY
+from .._constants import APP_AIOPG_ENGINE_KEY
from ..login.utils import notify_user_logout
from ..security.api import clean_auth_policy_cache
from ..users.api import update_expired_users
@@ -60,7 +60,7 @@ async def _update_expired_users(app: web.Application):
"""
It is resilient, i.e. if update goes wrong, it waits a bit and retries
"""
- engine: Engine = app[APP_DB_ENGINE_KEY]
+ engine: Engine = app[APP_AIOPG_ENGINE_KEY]
assert engine # nosec
if updated := await update_expired_users(engine):
diff --git a/services/web/server/src/simcore_service_webserver/groups/_classifiers.py b/services/web/server/src/simcore_service_webserver/groups/_classifiers.py
index 5ac89e0ee941..0a71801a115e 100644
--- a/services/web/server/src/simcore_service_webserver/groups/_classifiers.py
+++ b/services/web/server/src/simcore_service_webserver/groups/_classifiers.py
@@ -26,7 +26,7 @@
)
from simcore_postgres_database.models.classifiers import group_classifiers
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from ..scicrunch.db import ResearchResourceRepository
from ..scicrunch.service_client import SciCrunch
@@ -75,7 +75,7 @@ class Classifiers(BaseModel):
class GroupClassifierRepository:
def __init__(self, app: web.Application):
- self.engine = get_database_engine(app)
+ self.engine = get_aiopg_engine(app)
async def _get_bundle(self, gid: int) -> RowProxy | None:
async with self.engine.acquire() as conn:
diff --git a/services/web/server/src/simcore_service_webserver/groups/api.py b/services/web/server/src/simcore_service_webserver/groups/api.py
index e0a837f83996..d01012c1f7c2 100644
--- a/services/web/server/src/simcore_service_webserver/groups/api.py
+++ b/services/web/server/src/simcore_service_webserver/groups/api.py
@@ -6,7 +6,7 @@
from models_library.groups import Group
from models_library.users import GroupID, UserID
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from ..users.api import get_user
from . import _db
from ._utils import AccessRightsDict
@@ -21,7 +21,7 @@ async def list_user_groups_with_read_access(
"""
# NOTE: Careful! It seems we are filtering out groups, such as Product Groups,
# because they do not have read access. I believe this was done because the frontend did not want to display them.
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.get_all_user_groups_with_read_access(conn, user_id=user_id)
@@ -29,7 +29,7 @@ async def list_all_user_groups(app: web.Application, user_id: UserID) -> list[Gr
"""
Return all user groups
"""
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
groups_db = await _db.get_all_user_groups(conn, user_id=user_id)
return [Group.construct(**group.dict()) for group in groups_db]
@@ -44,7 +44,7 @@ async def get_user_group(
raises GroupNotFoundError
raises UserInsufficientRightsError
"""
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.get_user_group(conn, user_id=user_id, gid=gid)
@@ -55,7 +55,7 @@ async def get_product_group_for_user(
Returns product's group if user belongs to it, otherwise it
raises GroupNotFoundError
"""
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.get_product_group_for_user(
conn, user_id=user_id, product_gid=product_gid
)
@@ -64,7 +64,7 @@ async def get_product_group_for_user(
async def create_user_group(
app: web.Application, user_id: UserID, new_group: dict
) -> dict[str, Any]:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.create_user_group(conn, user_id=user_id, new_group=new_group)
@@ -74,7 +74,7 @@ async def update_user_group(
gid: GroupID,
new_group_values: dict[str, str],
) -> dict[str, str]:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.update_user_group(
conn, user_id=user_id, gid=gid, new_group_values=new_group_values
)
@@ -83,28 +83,28 @@ async def update_user_group(
async def delete_user_group(
app: web.Application, user_id: UserID, gid: GroupID
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.delete_user_group(conn, user_id=user_id, gid=gid)
async def list_users_in_group(
app: web.Application, user_id: UserID, gid: GroupID
) -> list[dict[str, str]]:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.list_users_in_group(conn, user_id=user_id, gid=gid)
async def auto_add_user_to_groups(app: web.Application, user_id: UserID) -> None:
user: dict = await get_user(app, user_id)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.auto_add_user_to_groups(conn, user=user)
async def auto_add_user_to_product_group(
app: web.Application, user_id: UserID, product_name: str
) -> GroupID:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.auto_add_user_to_product_group(
conn, user_id=user_id, product_name=product_name
)
@@ -113,7 +113,7 @@ async def auto_add_user_to_product_group(
async def is_user_by_email_in_group(
app: web.Application, user_email: LowerCaseEmailStr, group_id: GroupID
) -> bool:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.is_user_by_email_in_group(
conn,
email=user_email,
@@ -141,7 +141,7 @@ async def add_user_in_group(
msg = "Invalid method call, missing user id or user email"
raise GroupsError(msg)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
if new_user_email:
user: RowProxy = await _db.get_user_from_email(conn, new_user_email)
new_user_id = user["id"]
@@ -162,7 +162,7 @@ async def add_user_in_group(
async def get_user_in_group(
app: web.Application, user_id: UserID, gid: GroupID, the_user_id_in_group: int
) -> dict[str, str]:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.get_user_in_group(
conn, user_id=user_id, gid=gid, the_user_id_in_group=the_user_id_in_group
)
@@ -175,7 +175,7 @@ async def update_user_in_group(
the_user_id_in_group: int,
new_values_for_user_in_group: dict,
) -> dict[str, str]:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.update_user_in_group(
conn,
user_id=user_id,
@@ -188,14 +188,14 @@ async def update_user_in_group(
async def delete_user_in_group(
app: web.Application, user_id: UserID, gid: GroupID, the_user_id_in_group: int
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
return await _db.delete_user_in_group(
conn, user_id=user_id, gid=gid, the_user_id_in_group=the_user_id_in_group
)
async def get_group_from_gid(app: web.Application, gid: GroupID) -> Group | None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
group_db = await _db.get_group_from_gid(conn, gid=gid)
if group_db:
diff --git a/services/web/server/src/simcore_service_webserver/login/_auth_api.py b/services/web/server/src/simcore_service_webserver/login/_auth_api.py
index a5de2c1abc59..2e752f7ae985 100644
--- a/services/web/server/src/simcore_service_webserver/login/_auth_api.py
+++ b/services/web/server/src/simcore_service_webserver/login/_auth_api.py
@@ -6,7 +6,7 @@
from simcore_postgres_database.models.users import UserStatus
from simcore_postgres_database.utils_users import UsersRepo
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from ..groups.api import is_user_by_email_in_group
from ..products.api import Product
from ..security.api import check_password, encrypt_password
@@ -30,7 +30,7 @@ async def create_user(
expires_at: datetime | None,
) -> dict[str, Any]:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
user = await UsersRepo.new_user(
conn,
email=email,
diff --git a/services/web/server/src/simcore_service_webserver/login/handlers_change.py b/services/web/server/src/simcore_service_webserver/login/handlers_change.py
index f8b71ce87634..257a4be27e51 100644
--- a/services/web/server/src/simcore_service_webserver/login/handlers_change.py
+++ b/services/web/server/src/simcore_service_webserver/login/handlers_change.py
@@ -8,7 +8,7 @@
from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON
from servicelib.request_keys import RQT_USERID_KEY
from simcore_postgres_database.utils_users import UsersRepo
-from simcore_service_webserver.db.plugin import get_database_engine
+from simcore_service_webserver.db.plugin import get_aiopg_engine
from .._meta import API_VTAG
from ..products.api import Product, get_current_product
@@ -149,7 +149,7 @@ async def submit_request_to_change_email(request: web.Request):
if user["email"] == request_body.email:
return flash_response("Email changed")
- async with get_database_engine(request.app).acquire() as conn:
+ async with get_aiopg_engine(request.app).acquire() as conn:
if await UsersRepo.is_email_used(conn, email=request_body.email):
raise web.HTTPUnprocessableEntity(reason="This email cannot be used")
diff --git a/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py b/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py
index 8aec3e453593..2b2653b9bfce 100644
--- a/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py
+++ b/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py
@@ -9,7 +9,7 @@
from pydantic import BaseModel, PositiveInt
from simcore_postgres_database.utils_payments_autorecharge import AutoRechargeStmts
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .errors import InvalidPaymentMethodError
_logger = logging.getLogger(__name__)
@@ -34,7 +34,7 @@ async def get_wallet_autorecharge(
*,
wallet_id: WalletID,
) -> PaymentsAutorechargeDB | None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
stmt = AutoRechargeStmts.get_wallet_autorecharge(wallet_id)
result = await conn.execute(stmt)
row = await result.first()
@@ -53,7 +53,7 @@ async def replace_wallet_autorecharge(
InvalidPaymentMethodError: if `new` includes some invalid 'primary_payment_method_id'
"""
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
stmt = AutoRechargeStmts.is_valid_payment_method(
user_id=user_id,
wallet_id=new.wallet_id,
diff --git a/services/web/server/src/simcore_service_webserver/payments/_methods_db.py b/services/web/server/src/simcore_service_webserver/payments/_methods_db.py
index b5838eb171c6..afc03beaef65 100644
--- a/services/web/server/src/simcore_service_webserver/payments/_methods_db.py
+++ b/services/web/server/src/simcore_service_webserver/payments/_methods_db.py
@@ -16,7 +16,7 @@
from sqlalchemy import literal_column
from sqlalchemy.sql import func
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .errors import (
PaymentMethodAlreadyAckedError,
PaymentMethodNotFoundError,
@@ -48,7 +48,7 @@ async def insert_init_payment_method(
wallet_id: WalletID,
initiated_at: datetime.datetime,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
try:
await conn.execute(
payments_methods.insert().values(
@@ -70,7 +70,7 @@ async def list_successful_payment_methods(
user_id: UserID,
wallet_id: WalletID,
) -> list[PaymentsMethodsDB]:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result: ResultProxy = await conn.execute(
payments_methods.select()
.where(
@@ -91,7 +91,7 @@ async def get_successful_payment_method(
wallet_id: WalletID,
payment_method_id: PaymentMethodID,
) -> PaymentsMethodsDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result: ResultProxy = await conn.execute(
payments_methods.select().where(
(payments_methods.c.user_id == user_id)
@@ -110,7 +110,7 @@ async def get_successful_payment_method(
async def get_pending_payment_methods_ids(
app: web.Application,
) -> list[PaymentMethodID]:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
sa.select(payments_methods.c.payment_method_id)
.where(payments_methods.c.completed_at == None) # noqa: E711
@@ -141,7 +141,7 @@ async def udpate_payment_method(
if state_message:
optional["state_message"] = state_message
- async with get_database_engine(app).acquire() as conn, conn.begin():
+ async with get_aiopg_engine(app).acquire() as conn, conn.begin():
row = await (
await conn.execute(
sa.select(
@@ -178,7 +178,7 @@ async def delete_payment_method(
wallet_id: WalletID,
payment_method_id: PaymentMethodID,
):
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
payments_methods.delete().where(
(payments_methods.c.user_id == user_id)
diff --git a/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py b/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py
index f54f48403bbf..25efdb5562cd 100644
--- a/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py
+++ b/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py
@@ -23,7 +23,7 @@
from simcore_postgres_database.utils_payments import insert_init_payment_transaction
from yarl import URL
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from ..products.api import get_product_stripe_info
from ..resource_usage.api import add_credits_to_wallet
from ..users.api import get_user_display_and_id_names, get_user_invoice_address
@@ -86,7 +86,7 @@ async def _fake_init_payment(
.with_query(id=payment_id)
)
# (2) Annotate INIT transaction
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await insert_init_payment_transaction(
conn,
payment_id=payment_id,
diff --git a/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py b/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py
index 9f94d46b7077..7bac89e96612 100644
--- a/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py
+++ b/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py
@@ -21,7 +21,7 @@
update_payment_transaction_state,
)
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .errors import PaymentCompletedError, PaymentNotFoundError
_logger = logging.getLogger(__name__)
@@ -60,7 +60,7 @@ async def list_user_payment_transactions(
Sorted by newest-first
"""
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
total_number_of_items, rows = await get_user_payments_transactions(
conn, user_id=user_id, offset=offset, limit=limit
)
@@ -69,7 +69,7 @@ async def list_user_payment_transactions(
async def get_pending_payment_transactions_ids(app: web.Application) -> list[PaymentID]:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
sa.select(payments_transactions.c.payment_id)
.where(payments_transactions.c.completed_at == None) # noqa: E711
@@ -97,7 +97,7 @@ async def complete_payment_transaction(
if invoice_url:
optional_kwargs["invoice_url"] = invoice_url
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
row = await update_payment_transaction_state(
conn,
payment_id=payment_id,
diff --git a/services/web/server/src/simcore_service_webserver/products/_events.py b/services/web/server/src/simcore_service_webserver/products/_events.py
index 64726ce55c3c..1bee38cf8fb3 100644
--- a/services/web/server/src/simcore_service_webserver/products/_events.py
+++ b/services/web/server/src/simcore_service_webserver/products/_events.py
@@ -14,7 +14,7 @@
get_or_create_product_group,
)
-from .._constants import APP_DB_ENGINE_KEY, APP_PRODUCTS_KEY
+from .._constants import APP_AIOPG_ENGINE_KEY, APP_PRODUCTS_KEY
from ..statics._constants import FRONTEND_APP_DEFAULT, FRONTEND_APPS_AVAILABLE
from ._db import get_product_payment_fields, iter_products
from ._model import Product
@@ -46,7 +46,7 @@ async def auto_create_products_groups(app: web.Application) -> None:
NOTE: could not add this in 'setup_groups' (groups plugin)
since it has to be executed BEFORE 'load_products_on_startup'
"""
- engine = cast(Engine, app[APP_DB_ENGINE_KEY])
+ engine = cast(Engine, app[APP_AIOPG_ENGINE_KEY])
async with engine.acquire() as connection:
async for row in iter_products(connection):
@@ -76,7 +76,7 @@ async def load_products_on_startup(app: web.Application):
Loads info on products stored in the database into app's storage (i.e. memory)
"""
app_products: OrderedDict[str, Product] = OrderedDict()
- engine: Engine = app[APP_DB_ENGINE_KEY]
+ engine: Engine = app[APP_AIOPG_ENGINE_KEY]
async with engine.acquire() as connection:
async for row in iter_products(connection):
assert isinstance(row, RowProxy) # nosec
diff --git a/services/web/server/src/simcore_service_webserver/products/_model.py b/services/web/server/src/simcore_service_webserver/products/_model.py
index cccd77070081..82c4a3b64aaf 100644
--- a/services/web/server/src/simcore_service_webserver/products/_model.py
+++ b/services/web/server/src/simcore_service_webserver/products/_model.py
@@ -193,7 +193,6 @@ class Config:
"invitation_form": True,
"name": "ACME",
"copyright": "© ACME correcaminos",
- "has_landing_page": False,
},
"issues": [
{
diff --git a/services/web/server/src/simcore_service_webserver/projects/_access_rights_api.py b/services/web/server/src/simcore_service_webserver/projects/_access_rights_api.py
index 1c7919a49850..67539364cb7d 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_access_rights_api.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_access_rights_api.py
@@ -4,7 +4,7 @@
from models_library.users import UserID
from simcore_service_webserver.projects._db_utils import PermissionStr
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from ..workspaces.api import get_workspace
from ._access_rights_db import get_project_owner
from .db import APP_PROJECT_DBAPI, ProjectDBAPI
@@ -20,7 +20,7 @@ async def validate_project_ownership(
ProjectInvalidRightsError: if 'user_id' does not own 'project_uuid'
"""
if (
- await get_project_owner(get_database_engine(app), project_uuid=project_uuid)
+ await get_project_owner(get_aiopg_engine(app), project_uuid=project_uuid)
!= user_id
):
raise ProjectInvalidRightsError(user_id=user_id, project_uuid=project_uuid)
diff --git a/services/web/server/src/simcore_service_webserver/projects/_folders_db.py b/services/web/server/src/simcore_service_webserver/projects/_folders_db.py
index 1ac57057c53f..270aabf4dc3a 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_folders_db.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_folders_db.py
@@ -16,7 +16,7 @@
from sqlalchemy import func, literal_column
from sqlalchemy.sql import select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
_logger = logging.getLogger(__name__)
@@ -43,7 +43,7 @@ async def insert_project_to_folder(
folder_id: FolderID,
private_workspace_user_id_or_none: UserID | None,
) -> ProjectToFolderDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
projects_to_folders.insert()
.values(
@@ -76,7 +76,7 @@ async def get_project_to_folder(
& (projects_to_folders.c.user_id == private_workspace_user_id_or_none)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(stmt)
row = await result.first()
if row is None:
@@ -90,7 +90,7 @@ async def delete_project_to_folder(
folder_id: FolderID,
private_workspace_user_id_or_none: UserID | None,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
projects_to_folders.delete().where(
(projects_to_folders.c.project_uuid == f"{project_id}")
@@ -104,7 +104,7 @@ async def delete_all_project_to_folder_by_project_id(
app: web.Application,
project_id: ProjectID,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
projects_to_folders.delete().where(
projects_to_folders.c.project_uuid == f"{project_id}"
diff --git a/services/web/server/src/simcore_service_webserver/projects/_groups_db.py b/services/web/server/src/simcore_service_webserver/projects/_groups_db.py
index 8420d71ef7a8..b1b581208d9e 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_groups_db.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_groups_db.py
@@ -15,7 +15,7 @@
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.sql import select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .exceptions import ProjectGroupNotFoundError
_logger = logging.getLogger(__name__)
@@ -44,7 +44,7 @@ async def create_project_group(
write: bool,
delete: bool,
) -> ProjectGroupGetDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
project_to_groups.insert()
.values(
@@ -79,7 +79,7 @@ async def list_project_groups(
.where(project_to_groups.c.project_uuid == f"{project_id}")
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(stmt)
rows = await result.fetchall() or []
return parse_obj_as(list[ProjectGroupGetDB], rows)
@@ -106,7 +106,7 @@ async def get_project_group(
)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(stmt)
row = await result.first()
if row is None:
@@ -125,7 +125,7 @@ async def replace_project_group(
write: bool,
delete: bool,
) -> ProjectGroupGetDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
project_to_groups.update()
.values(
@@ -156,7 +156,7 @@ async def update_or_insert_project_group(
write: bool,
delete: bool,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
insert_stmt = pg_insert(project_to_groups).values(
project_uuid=f"{project_id}",
gid=group_id,
@@ -183,7 +183,7 @@ async def delete_project_group(
project_id: ProjectID,
group_id: GroupID,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
project_to_groups.delete().where(
(project_to_groups.c.project_uuid == f"{project_id}")
@@ -196,7 +196,7 @@ async def delete_all_project_groups(
app: web.Application,
project_id: ProjectID,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
project_to_groups.delete().where(
project_to_groups.c.project_uuid == f"{project_id}"
diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_api.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_api.py
index db27c3359bd3..7b430bbeddd6 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_metadata_api.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_api.py
@@ -8,7 +8,7 @@
from models_library.users import UserID
from pydantic import parse_obj_as
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from . import _metadata_db
from ._access_rights_api import validate_project_ownership
@@ -21,7 +21,7 @@ async def get_project_custom_metadata(
await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid)
return await _metadata_db.get_project_custom_metadata(
- engine=get_database_engine(app), project_uuid=project_uuid
+ engine=get_aiopg_engine(app), project_uuid=project_uuid
)
@@ -34,7 +34,7 @@ async def set_project_custom_metadata(
await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid)
return await _metadata_db.set_project_custom_metadata(
- engine=get_database_engine(app),
+ engine=get_aiopg_engine(app),
project_uuid=project_uuid,
custom_metadata=value,
)
@@ -49,7 +49,7 @@ async def _project_has_ancestors(
await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid)
return await _metadata_db.project_has_ancestors(
- engine=get_database_engine(app), project_uuid=project_uuid
+ engine=get_aiopg_engine(app), project_uuid=project_uuid
)
@@ -74,11 +74,11 @@ async def set_project_ancestors_from_custom_metadata(
# let's try to get the parent project UUID
parent_project_uuid = await _metadata_db.get_project_id_from_node_id(
- get_database_engine(app), node_id=parent_node_id
+ get_aiopg_engine(app), node_id=parent_node_id
)
await _metadata_db.set_project_ancestors(
- get_database_engine(app),
+ get_aiopg_engine(app),
project_uuid=project_uuid,
parent_project_uuid=parent_project_uuid,
parent_node_id=parent_node_id,
@@ -95,7 +95,7 @@ async def set_project_ancestors(
await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid)
await _metadata_db.set_project_ancestors(
- get_database_engine(app),
+ get_aiopg_engine(app),
project_uuid=project_uuid,
parent_project_uuid=parent_project_uuid,
parent_node_id=parent_node_id,
diff --git a/services/web/server/src/simcore_service_webserver/projects/db.py b/services/web/server/src/simcore_service_webserver/projects/db.py
index 68b305b61cda..73d561712ca1 100644
--- a/services/web/server/src/simcore_service_webserver/projects/db.py
+++ b/services/web/server/src/simcore_service_webserver/projects/db.py
@@ -34,7 +34,7 @@
from models_library.workspaces import WorkspaceID
from pydantic import parse_obj_as
from pydantic.types import PositiveInt
-from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY
+from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
from servicelib.logging_utils import get_log_record_extra, log_context
from simcore_postgres_database.errors import UniqueViolation
from simcore_postgres_database.models.groups import user_to_groups
@@ -101,11 +101,11 @@
class ProjectDBAPI(BaseProjectDB):
def __init__(self, app: web.Application) -> None:
self._app = app
- self._engine = cast(Engine, app.get(APP_DB_ENGINE_KEY))
+ self._engine = cast(Engine, app.get(APP_AIOPG_ENGINE_KEY))
def _init_engine(self) -> None:
# Delays creation of engine because it setup_db does it on_startup
- self._engine = cast(Engine, self._app.get(APP_DB_ENGINE_KEY))
+ self._engine = cast(Engine, self._app.get(APP_AIOPG_ENGINE_KEY))
if self._engine is None:
msg = "Database subsystem was not initialized"
raise ValueError(msg)
diff --git a/services/web/server/src/simcore_service_webserver/scicrunch/db.py b/services/web/server/src/simcore_service_webserver/scicrunch/db.py
index ca5df617ee8c..11e84b80db62 100644
--- a/services/web/server/src/simcore_service_webserver/scicrunch/db.py
+++ b/services/web/server/src/simcore_service_webserver/scicrunch/db.py
@@ -7,7 +7,7 @@
import sqlalchemy as sa
from aiohttp import web
from aiopg.sa.result import ResultProxy, RowProxy
-from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY
+from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
from simcore_postgres_database.models.scicrunch_resources import scicrunch_resources
from sqlalchemy.dialects.postgresql import insert as sa_pg_insert
@@ -26,7 +26,7 @@ class ResearchResourceRepository:
# WARNING: interfaces to both ResarchResource and ResearchResourceAtDB
def __init__(self, app: web.Application):
- self._engine = app[APP_DB_ENGINE_KEY]
+ self._engine = app[APP_AIOPG_ENGINE_KEY]
async def list_resources(self) -> list[ResearchResource]:
async with self._engine.acquire() as conn:
diff --git a/services/web/server/src/simcore_service_webserver/security/_authz_policy.py b/services/web/server/src/simcore_service_webserver/security/_authz_policy.py
index 44b6083f566b..327a05ff6208 100644
--- a/services/web/server/src/simcore_service_webserver/security/_authz_policy.py
+++ b/services/web/server/src/simcore_service_webserver/security/_authz_policy.py
@@ -15,7 +15,7 @@
from models_library.users import UserID
from simcore_postgres_database.errors import DatabaseError
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from ._authz_access_model import (
AuthContextDict,
OptionalContext,
@@ -58,7 +58,7 @@ async def _get_auth_or_none(self, *, email: str) -> AuthInfoDict | None:
web.HTTPServiceUnavailable: if database raises an exception
"""
with _handle_exceptions_as_503():
- return await get_active_user_or_none(get_database_engine(self._app), email)
+ return await get_active_user_or_none(get_aiopg_engine(self._app), email)
@cached(
ttl=_AUTHZ_BURST_CACHE_TTL,
@@ -74,7 +74,7 @@ async def _has_access_to_product(
"""
with _handle_exceptions_as_503():
return await is_user_in_product_name(
- get_database_engine(self._app), user_id, product_name
+ get_aiopg_engine(self._app), user_id, product_name
)
@property
diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_catalog.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_catalog.py
index 3df62ebd3797..e504422c3e25 100644
--- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_catalog.py
+++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_catalog.py
@@ -20,7 +20,7 @@
)
from simcore_postgres_database.utils_services import create_select_latest_services_query
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from ._errors import ServiceNotFound
from .settings import StudiesDispatcherSettings, get_plugin_settings
@@ -64,7 +64,7 @@ async def iter_latest_product_services(
assert page_number >= 1 # nosec
assert ((page_number - 1) * page_size) >= 0 # nosec
- engine: Engine = get_database_engine(app)
+ engine: Engine = get_aiopg_engine(app)
settings: StudiesDispatcherSettings = get_plugin_settings(app)
# Select query for latest version of the service
@@ -135,7 +135,7 @@ async def validate_requested_service(
service_key: ServiceKey,
service_version: ServiceVersion,
) -> ValidService:
- engine: Engine = get_database_engine(app)
+ engine: Engine = get_aiopg_engine(app)
async with engine.acquire() as conn:
query = sa.select(
diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_core.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_core.py
index a34a50832d94..a9ffe0f3750c 100644
--- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_core.py
+++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_core.py
@@ -14,7 +14,7 @@
)
from sqlalchemy.dialects.postgresql import ARRAY, INTEGER
-from .._constants import APP_DB_ENGINE_KEY
+from .._constants import APP_AIOPG_ENGINE_KEY
from ._errors import FileToLarge, IncompatibleService
from ._models import ViewerInfo
from .settings import get_plugin_settings
@@ -41,7 +41,7 @@ async def list_viewers_info(
#
consumers: deque = deque()
- async with app[APP_DB_ENGINE_KEY].acquire() as conn:
+ async with app[APP_AIOPG_ENGINE_KEY].acquire() as conn:
# FIXME: ADD CONDITION: service MUST be shared with EVERYBODY!
query = services_consume_filetypes.select()
if file_type:
@@ -119,7 +119,7 @@ def _version(column_or_value):
return await get_default_viewer(app, file_type, file_size)
if service_key and service_version:
- async with app[APP_DB_ENGINE_KEY].acquire() as conn:
+ async with app[APP_AIOPG_ENGINE_KEY].acquire() as conn:
query = (
services_consume_filetypes.select()
.where(
diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py
index 055f0f78fcf8..52fd7caec7d1 100644
--- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py
+++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py
@@ -8,7 +8,7 @@
from simcore_postgres_database.models.project_to_groups import project_to_groups
from simcore_postgres_database.models.projects import ProjectType, projects
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from ..projects.api import ProjectPermalink, register_permalink_factory
from ..projects.exceptions import PermalinkNotAllowedError, ProjectNotFoundError
from ..utils_aiohttp import create_url_for_function
@@ -77,7 +77,7 @@ async def permalink_factory(
"""
# NOTE: next iterations will mobe this as part of the project repository pattern
- engine = get_database_engine(request.app)
+ engine = get_aiopg_engine(request.app)
async with engine.acquire() as conn:
access_rights_subquery = (
sa.select(
diff --git a/services/web/server/src/simcore_service_webserver/tags/_api.py b/services/web/server/src/simcore_service_webserver/tags/_api.py
new file mode 100644
index 000000000000..6f3a74853e78
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/tags/_api.py
@@ -0,0 +1,58 @@
+""" _api: implements `tags` plugin **service layer**
+"""
+
+from aiohttp import web
+from models_library.basic_types import IdInt
+from models_library.users import UserID
+from servicelib.aiohttp.db_asyncpg_engine import get_async_engine
+from simcore_postgres_database.utils_tags import TagsRepo
+from sqlalchemy.ext.asyncio import AsyncEngine
+
+from .schemas import TagCreate, TagGet, TagUpdate
+
+
+async def create_tag(
+ app: web.Application, user_id: UserID, new_tag: TagCreate
+) -> TagGet:
+ engine: AsyncEngine = get_async_engine(app)
+
+ repo = TagsRepo(engine)
+ tag = await repo.create(
+ user_id=user_id,
+ read=True,
+ write=True,
+ delete=True,
+ **new_tag.dict(exclude_unset=True),
+ )
+ return TagGet.from_db(tag)
+
+
+async def list_tags(
+ app: web.Application,
+ user_id: UserID,
+) -> list[TagGet]:
+ engine: AsyncEngine = get_async_engine(app)
+ repo = TagsRepo(engine)
+ tags = await repo.list_all(user_id=user_id)
+ return [TagGet.from_db(t) for t in tags]
+
+
+async def update_tag(
+ app: web.Application, user_id: UserID, tag_id: IdInt, tag_updates: TagUpdate
+) -> TagGet:
+ engine: AsyncEngine = get_async_engine(app)
+
+ repo = TagsRepo(engine)
+ tag = await repo.update(
+ user_id=user_id,
+ tag_id=tag_id,
+ **tag_updates.dict(exclude_unset=True),
+ )
+ return TagGet.from_db(tag)
+
+
+async def delete_tag(app: web.Application, user_id: UserID, tag_id: IdInt):
+ engine: AsyncEngine = get_async_engine(app)
+
+ repo = TagsRepo(engine)
+ await repo.delete(user_id=user_id, tag_id=tag_id)
diff --git a/services/web/server/src/simcore_service_webserver/tags/_handlers.py b/services/web/server/src/simcore_service_webserver/tags/_handlers.py
index de0fc7dd5b1a..ac0c08698eb1 100644
--- a/services/web/server/src/simcore_service_webserver/tags/_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/tags/_handlers.py
@@ -1,9 +1,7 @@
import functools
from aiohttp import web
-from aiopg.sa.engine import Engine
from pydantic import parse_obj_as
-from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY
from servicelib.aiohttp.requests_validation import (
parse_request_body_as,
parse_request_path_parameters_as,
@@ -13,16 +11,15 @@
from simcore_postgres_database.utils_tags import (
TagNotFoundError,
TagOperationNotAllowedError,
- TagsRepo,
)
from .._meta import API_VTAG as VTAG
from ..login.decorators import login_required
from ..security.decorators import permission_required
from ..utils_aiohttp import envelope_json_response
+from . import _api
from .schemas import (
TagCreate,
- TagGet,
TagGroupCreate,
TagGroupGet,
TagGroupPathParams,
@@ -55,21 +52,14 @@ async def wrapper(request: web.Request) -> web.StreamResponse:
@permission_required("tag.crud.*")
@_handle_tags_exceptions
async def create_tag(request: web.Request):
- engine: Engine = request.app[APP_DB_ENGINE_KEY]
+ assert request.app # nosec
req_ctx = TagRequestContext.parse_obj(request)
new_tag = await parse_request_body_as(TagCreate, request)
- repo = TagsRepo(user_id=req_ctx.user_id)
- async with engine.acquire() as conn:
- tag = await repo.create(
- conn,
- read=True,
- write=True,
- delete=True,
- **new_tag.dict(exclude_unset=True),
- )
- model = TagGet.from_db(tag)
- return envelope_json_response(model)
+ created = await _api.create_tag(
+ request.app, user_id=req_ctx.user_id, new_tag=new_tag
+ )
+ return envelope_json_response(created)
@routes.get(f"/{VTAG}/tags", name="list_tags")
@@ -77,15 +67,10 @@ async def create_tag(request: web.Request):
@permission_required("tag.crud.*")
@_handle_tags_exceptions
async def list_tags(request: web.Request):
- engine: Engine = request.app[APP_DB_ENGINE_KEY]
- req_ctx = TagRequestContext.parse_obj(request)
- repo = TagsRepo(user_id=req_ctx.user_id)
- async with engine.acquire() as conn:
- tags = await repo.list_all(conn)
- return envelope_json_response(
- [TagGet.from_db(t).dict(by_alias=True) for t in tags]
- )
+ req_ctx = TagRequestContext.parse_obj(request)
+ got = await _api.list_tags(request.app, user_id=req_ctx.user_id)
+ return envelope_json_response(got)
@routes.patch(f"/{VTAG}/tags/{{tag_id}}", name="update_tag")
@@ -93,18 +78,17 @@ async def list_tags(request: web.Request):
@permission_required("tag.crud.*")
@_handle_tags_exceptions
async def update_tag(request: web.Request):
- engine: Engine = request.app[APP_DB_ENGINE_KEY]
req_ctx = TagRequestContext.parse_obj(request)
path_params = parse_request_path_parameters_as(TagPathParams, request)
tag_updates = await parse_request_body_as(TagUpdate, request)
- repo = TagsRepo(user_id=req_ctx.user_id)
- async with engine.acquire() as conn:
- tag = await repo.update(
- conn, path_params.tag_id, **tag_updates.dict(exclude_unset=True)
- )
- model = TagGet.from_db(tag)
- return envelope_json_response(model)
+ updated = await _api.update_tag(
+ request.app,
+ user_id=req_ctx.user_id,
+ tag_id=path_params.tag_id,
+ tag_updates=tag_updates,
+ )
+ return envelope_json_response(updated)
@routes.delete(f"/{VTAG}/tags/{{tag_id}}", name="delete_tag")
@@ -112,13 +96,12 @@ async def update_tag(request: web.Request):
@permission_required("tag.crud.*")
@_handle_tags_exceptions
async def delete_tag(request: web.Request):
- engine: Engine = request.app[APP_DB_ENGINE_KEY]
req_ctx = TagRequestContext.parse_obj(request)
path_params = parse_request_path_parameters_as(TagPathParams, request)
- repo = TagsRepo(user_id=req_ctx.user_id)
- async with engine.acquire() as conn:
- await repo.delete(conn, tag_id=path_params.tag_id)
+ await _api.delete_tag(
+ request.app, user_id=req_ctx.user_id, tag_id=path_params.tag_id
+ )
raise web.HTTPNoContent(content_type=MIMETYPE_APPLICATION_JSON)
diff --git a/services/web/server/src/simcore_service_webserver/users/_api.py b/services/web/server/src/simcore_service_webserver/users/_api.py
index a054bfe59279..3473fc6eb5fb 100644
--- a/services/web/server/src/simcore_service_webserver/users/_api.py
+++ b/services/web/server/src/simcore_service_webserver/users/_api.py
@@ -9,7 +9,7 @@
from pydantic import parse_obj_as
from simcore_postgres_database.models.users import UserStatus
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from . import _db, _schemas
from ._db import get_user_or_raise
from ._db import list_user_permissions as db_list_of_permissions
@@ -39,7 +39,7 @@ async def get_user_credentials(
app: web.Application, *, user_id: UserID
) -> UserCredentialsTuple:
row = await get_user_or_raise(
- get_database_engine(app),
+ get_aiopg_engine(app),
user_id=user_id,
return_column_names=[
"name",
@@ -58,7 +58,7 @@ async def get_user_credentials(
async def set_user_as_deleted(app: web.Application, user_id: UserID) -> None:
await update_user_status(
- get_database_engine(app), user_id=user_id, new_status=UserStatus.DELETED
+ get_aiopg_engine(app), user_id=user_id, new_status=UserStatus.DELETED
)
@@ -74,13 +74,13 @@ async def search_users(
) -> list[_schemas.UserProfile]:
# NOTE: this search is deploy-wide i.e. independent of the product!
rows = await _db.search_users_and_get_profile(
- get_database_engine(app), email_like=_glob_to_sql_like(email_glob)
+ get_aiopg_engine(app), email_like=_glob_to_sql_like(email_glob)
)
async def _list_products_or_none(user_id):
if user_id is not None and include_products:
products = await _db.get_user_products(
- get_database_engine(app), user_id=user_id
+ get_aiopg_engine(app), user_id=user_id
)
return [_.product_name for _ in products]
return None
@@ -137,7 +137,7 @@ async def pre_register_user(
details[f"pre_{key}"] = details.pop(key)
await _db.new_user_details(
- get_database_engine(app),
+ get_aiopg_engine(app),
email=profile.email,
created_by=creator_user_id,
**details,
@@ -153,7 +153,7 @@ async def get_user_invoice_address(
app: web.Application, user_id: UserID
) -> UserInvoiceAddress:
user_billing_details: UserBillingDetails = await _db.get_user_billing_details(
- get_database_engine(app), user_id=user_id
+ get_aiopg_engine(app), user_id=user_id
)
_user_billing_country = pycountry.countries.lookup(user_billing_details.country)
_user_billing_country_alpha_2_format = _user_billing_country.alpha_2
diff --git a/services/web/server/src/simcore_service_webserver/users/_db.py b/services/web/server/src/simcore_service_webserver/users/_db.py
index f7d8769f963f..1631705fef50 100644
--- a/services/web/server/src/simcore_service_webserver/users/_db.py
+++ b/services/web/server/src/simcore_service_webserver/users/_db.py
@@ -20,7 +20,7 @@
from simcore_service_webserver.users.exceptions import UserNotFoundError
from ..db.models import user_to_groups
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .exceptions import BillingDetailsNotFoundError
from .schemas import Permission
@@ -67,7 +67,7 @@ async def list_user_permissions(
allowed=False,
)
with contextlib.suppress(GroupExtraPropertiesNotFoundError):
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
user_group_extra_properties = (
await GroupExtraPropertiesRepo.get_aggregated_properties_for_user(
conn, user_id=user_id, product_name=product_name
diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_api.py b/services/web/server/src/simcore_service_webserver/users/_preferences_api.py
index 8e17a4a25d43..915ea31650a3 100644
--- a/services/web/server/src/simcore_service_webserver/users/_preferences_api.py
+++ b/services/web/server/src/simcore_service_webserver/users/_preferences_api.py
@@ -19,7 +19,7 @@
GroupExtraPropertiesRepo,
)
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from . import _preferences_db
from ._preferences_models import (
ALL_FRONTEND_PREFERENCES,
@@ -75,7 +75,7 @@ async def get_frontend_user_preference(
async def get_frontend_user_preferences_aggregation(
app: web.Application, *, user_id: UserID, product_name: ProductName
) -> AggregatedPreferences:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
group_extra_properties = (
await GroupExtraPropertiesRepo.get_aggregated_properties_for_user(
conn, user_id=user_id, product_name=product_name
diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_db.py b/services/web/server/src/simcore_service_webserver/users/_preferences_db.py
index 45903403af90..0e902cc44d05 100644
--- a/services/web/server/src/simcore_service_webserver/users/_preferences_db.py
+++ b/services/web/server/src/simcore_service_webserver/users/_preferences_db.py
@@ -4,7 +4,7 @@
from models_library.users import UserID
from simcore_postgres_database.utils_user_preferences import FrontendUserPreferencesRepo
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
def _get_user_preference_name(user_id: UserID, preference_name: PreferenceName) -> str:
@@ -18,7 +18,7 @@ async def get_user_preference(
product_name: ProductName,
preference_class: type[FrontendUserPreference],
) -> FrontendUserPreference | None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
preference_payload: dict | None = await FrontendUserPreferencesRepo.load(
conn,
user_id=user_id,
@@ -42,7 +42,7 @@ async def set_user_preference(
product_name: ProductName,
preference: FrontendUserPreference,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await FrontendUserPreferencesRepo.save(
conn,
user_id=user_id,
diff --git a/services/web/server/src/simcore_service_webserver/users/_tokens.py b/services/web/server/src/simcore_service_webserver/users/_tokens.py
index 3453309d2004..ed80bbf91a64 100644
--- a/services/web/server/src/simcore_service_webserver/users/_tokens.py
+++ b/services/web/server/src/simcore_service_webserver/users/_tokens.py
@@ -9,7 +9,7 @@
from sqlalchemy import and_, literal_column
from ..db.models import tokens
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .exceptions import TokenNotFoundError
from .schemas import ThirdPartyToken, TokenCreate
@@ -17,7 +17,7 @@
async def create_token(
app: web.Application, user_id: UserID, token: TokenCreate
) -> ThirdPartyToken:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
tokens.insert().values(
user_id=user_id,
@@ -30,7 +30,7 @@ async def create_token(
async def list_tokens(app: web.Application, user_id: UserID) -> list[ThirdPartyToken]:
user_tokens: list[ThirdPartyToken] = []
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
async for row in conn.execute(
sa.select(tokens.c.token_data).where(tokens.c.user_id == user_id)
):
@@ -41,7 +41,7 @@ async def list_tokens(app: web.Application, user_id: UserID) -> list[ThirdPartyT
async def get_token(
app: web.Application, user_id: UserID, service_id: str
) -> ThirdPartyToken:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
sa.select(tokens.c.token_data).where(
and_(tokens.c.user_id == user_id, tokens.c.token_service == service_id)
@@ -55,7 +55,7 @@ async def get_token(
async def update_token(
app: web.Application, user_id: UserID, service_id: str, token_data: dict[str, str]
) -> ThirdPartyToken:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
sa.select(tokens.c.token_data, tokens.c.token_id).where(
(tokens.c.user_id == user_id) & (tokens.c.token_service == service_id)
@@ -82,7 +82,7 @@ async def update_token(
async def delete_token(app: web.Application, user_id: UserID, service_id: str) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
tokens.delete().where(
and_(tokens.c.user_id == user_id, tokens.c.token_service == service_id)
diff --git a/services/web/server/src/simcore_service_webserver/users/api.py b/services/web/server/src/simcore_service_webserver/users/api.py
index 7b913f937cf4..7c8afce001e9 100644
--- a/services/web/server/src/simcore_service_webserver/users/api.py
+++ b/services/web/server/src/simcore_service_webserver/users/api.py
@@ -20,7 +20,7 @@
from simcore_postgres_database.models.users import UserRole
from ..db.models import GroupType, groups, user_to_groups, users
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from ..groups.models import convert_groups_db_to_schema
from ..login.storage import AsyncpgStorage, get_plugin_storage
from ..security.api import clean_auth_policy_cache
@@ -47,7 +47,7 @@ async def get_user_profile(
:raises UserNotFoundError:
"""
- engine = get_database_engine(app)
+ engine = get_aiopg_engine(app)
user_profile: dict[str, Any] = {}
user_primary_group = all_group = {}
user_standard_groups = []
@@ -149,7 +149,7 @@ async def update_user_profile(
"""
user_id = _parse_as_user(user_id)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
to_update = update.dict(
include={
"first_name",
@@ -169,7 +169,7 @@ async def get_user_role(app: web.Application, user_id: UserID) -> UserRole:
"""
user_id = _parse_as_user(user_id)
- engine = get_database_engine(app)
+ engine = get_aiopg_engine(app)
async with engine.acquire() as conn:
user_role: RowProxy | None = await conn.scalar(
sa.select(users.c.role).where(users.c.id == user_id)
@@ -195,7 +195,7 @@ async def get_user_name_and_email(
(user, email)
"""
row = await _db.get_user_or_raise(
- get_database_engine(app),
+ get_aiopg_engine(app),
user_id=_parse_as_user(user_id),
return_column_names=["name", "email"],
)
@@ -221,7 +221,7 @@ async def get_user_display_and_id_names(
UserNotFoundError
"""
row = await _db.get_user_or_raise(
- get_database_engine(app),
+ get_aiopg_engine(app),
user_id=_parse_as_user(user_id),
return_column_names=["name", "email", "first_name", "last_name"],
)
@@ -234,7 +234,7 @@ async def get_user_display_and_id_names(
async def get_guest_user_ids_and_names(app: web.Application) -> list[tuple[int, str]]:
- engine = get_database_engine(app)
+ engine = get_aiopg_engine(app)
result: deque = deque()
async with engine.acquire() as conn:
async for row in conn.execute(
@@ -276,7 +276,7 @@ async def get_user_fullname(app: web.Application, user_id: UserID) -> FullNameDi
"""
user_id = _parse_as_user(user_id)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
sa.select(users.c.first_name, users.c.last_name).where(
users.c.id == user_id
@@ -296,12 +296,12 @@ async def get_user(app: web.Application, user_id: UserID) -> dict[str, Any]:
"""
:raises UserNotFoundError:
"""
- row = await _db.get_user_or_raise(engine=get_database_engine(app), user_id=user_id)
+ row = await _db.get_user_or_raise(engine=get_aiopg_engine(app), user_id=user_id)
return dict(row)
async def get_user_id_from_gid(app: web.Application, primary_gid: int) -> UserID:
- engine = get_database_engine(app)
+ engine = get_aiopg_engine(app)
async with engine.acquire() as conn:
user_id: UserID = await conn.scalar(
sa.select(users.c.id).where(users.c.primary_gid == primary_gid)
@@ -310,7 +310,7 @@ async def get_user_id_from_gid(app: web.Application, primary_gid: int) -> UserID
async def get_users_in_group(app: web.Application, gid: GroupID) -> set[UserID]:
- engine = get_database_engine(app)
+ engine = get_aiopg_engine(app)
async with engine.acquire() as conn:
return await _db.get_users_ids_in_group(conn, gid)
diff --git a/services/web/server/src/simcore_service_webserver/wallets/_db.py b/services/web/server/src/simcore_service_webserver/wallets/_db.py
index bea1610be373..ccadf72d8a1b 100644
--- a/services/web/server/src/simcore_service_webserver/wallets/_db.py
+++ b/services/web/server/src/simcore_service_webserver/wallets/_db.py
@@ -17,7 +17,7 @@
from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER
from sqlalchemy.sql import select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .errors import WalletAccessForbiddenError, WalletNotFoundError
_logger = logging.getLogger(__name__)
@@ -31,7 +31,7 @@ async def create_wallet(
description: str | None,
thumbnail: str | None,
) -> WalletDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
wallets.insert()
.values(
@@ -95,7 +95,7 @@ async def list_wallets_for_user(
)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(stmt)
rows = await result.fetchall() or []
output: list[UserWalletDB] = [parse_obj_as(UserWalletDB, row) for row in rows]
@@ -117,7 +117,7 @@ async def list_wallets_owned_by_user(
& (wallets.c.product_name == product_name)
)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
results = await conn.execute(stmt)
rows = await results.fetchall() or []
return [row.wallet_id for row in rows]
@@ -150,7 +150,7 @@ async def get_wallet_for_user(
)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(stmt)
row = await result.first()
if row is None:
@@ -180,7 +180,7 @@ async def get_wallet(
& (wallets.c.product_name == product_name)
)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(stmt)
row = await result.first()
if row is None:
@@ -197,7 +197,7 @@ async def update_wallet(
status: WalletStatus,
product_name: ProductName,
) -> WalletDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
wallets.update()
.values(
@@ -224,7 +224,7 @@ async def delete_wallet(
wallet_id: WalletID,
product_name: ProductName,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
wallets.delete().where(
(wallets.c.wallet_id == wallet_id)
diff --git a/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py b/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py
index f9d42cc6dddd..30bf375099bf 100644
--- a/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py
+++ b/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py
@@ -14,7 +14,7 @@
from sqlalchemy import func, literal_column
from sqlalchemy.sql import select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .errors import WalletGroupNotFoundError
_logger = logging.getLogger(__name__)
@@ -43,7 +43,7 @@ async def create_wallet_group(
write: bool,
delete: bool,
) -> WalletGroupGetDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
wallet_to_groups.insert()
.values(
@@ -78,7 +78,7 @@ async def list_wallet_groups(
.where(wallet_to_groups.c.wallet_id == wallet_id)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(stmt)
rows = await result.fetchall() or []
return parse_obj_as(list[WalletGroupGetDB], rows)
@@ -105,7 +105,7 @@ async def get_wallet_group(
)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(stmt)
row = await result.first()
if row is None:
@@ -124,7 +124,7 @@ async def update_wallet_group(
write: bool,
delete: bool,
) -> WalletGroupGetDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
wallet_to_groups.update()
.values(
@@ -151,7 +151,7 @@ async def delete_wallet_group(
wallet_id: WalletID,
group_id: GroupID,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
wallet_to_groups.delete().where(
(wallet_to_groups.c.wallet_id == wallet_id)
diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py
index daeba51ae805..33f9a6a22178 100644
--- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py
+++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py
@@ -16,7 +16,7 @@
from sqlalchemy import func, literal_column
from sqlalchemy.sql import select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .errors import WorkspaceGroupNotFoundError
_logger = logging.getLogger(__name__)
@@ -48,7 +48,7 @@ async def create_workspace_group(
write: bool,
delete: bool,
) -> WorkspaceGroupGetDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
workspaces_access_rights.insert()
.values(
@@ -83,7 +83,7 @@ async def list_workspace_groups(
.where(workspaces_access_rights.c.workspace_id == workspace_id)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(stmt)
rows = await result.fetchall() or []
return [WorkspaceGroupGetDB.from_orm(row) for row in rows]
@@ -110,7 +110,7 @@ async def get_workspace_group(
)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(stmt)
row = await result.first()
if row is None:
@@ -129,7 +129,7 @@ async def update_workspace_group(
write: bool,
delete: bool,
) -> WorkspaceGroupGetDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
workspaces_access_rights.update()
.values(
@@ -156,7 +156,7 @@ async def delete_workspace_group(
workspace_id: WorkspaceID,
group_id: GroupID,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
workspaces_access_rights.delete().where(
(workspaces_access_rights.c.workspace_id == workspace_id)
diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
index 23de15c3b192..f85a38d1df8f 100644
--- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
+++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
@@ -26,7 +26,7 @@
from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER
from sqlalchemy.sql import Subquery, select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_aiopg_engine
from .errors import WorkspaceAccessForbiddenError, WorkspaceNotFoundError
_logger = logging.getLogger(__name__)
@@ -51,7 +51,7 @@ async def create_workspace(
description: str | None,
thumbnail: str | None,
) -> WorkspaceDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
workspaces.insert()
.values(
@@ -148,7 +148,7 @@ async def list_workspaces_for_user(
list_query = base_query.order_by(desc(getattr(workspaces.c, order_by.field)))
list_query = list_query.offset(offset).limit(limit)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
count_result = await conn.execute(count_query)
total_count = await count_result.scalar()
@@ -184,7 +184,7 @@ async def get_workspace_for_user(
)
)
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(base_query)
row = await result.first()
if row is None:
@@ -202,7 +202,7 @@ async def update_workspace(
thumbnail: str | None,
product_name: ProductName,
) -> WorkspaceDB:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
result = await conn.execute(
workspaces.update()
.values(
@@ -228,7 +228,7 @@ async def delete_workspace(
workspace_id: WorkspaceID,
product_name: ProductName,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with get_aiopg_engine(app).acquire() as conn:
await conn.execute(
workspaces.delete().where(
(workspaces.c.workspace_id == workspace_id)
diff --git a/services/web/server/tests/unit/isolated/test_products_model.py b/services/web/server/tests/unit/isolated/test_products_model.py
index 84fa67d94ebc..45ecbd0f4c37 100644
--- a/services/web/server/tests/unit/isolated/test_products_model.py
+++ b/services/web/server/tests/unit/isolated/test_products_model.py
@@ -50,7 +50,6 @@ def test_product_to_static():
"name": "ACME",
"url": "https://acme.com",
"license_url": "https://acme.com/license",
- "has_landing_page": False,
"invitation_form": True,
},
"issues": [
diff --git a/services/web/server/tests/unit/isolated/test_security__authz.py b/services/web/server/tests/unit/isolated/test_security__authz.py
index 5ba406db5109..b196d8429c33 100644
--- a/services/web/server/tests/unit/isolated/test_security__authz.py
+++ b/services/web/server/tests/unit/isolated/test_security__authz.py
@@ -246,7 +246,7 @@ async def test_check_access_expressions(access_model: RoleBasedAccessModel):
def mock_db(mocker: MockerFixture) -> MagicMock:
mocker.patch(
- "simcore_service_webserver.security._authz_policy.get_database_engine",
+ "simcore_service_webserver.security._authz_policy.get_aiopg_engine",
autospec=True,
return_value="FAKE-ENGINE",
)
diff --git a/services/web/server/tests/unit/isolated/test_security_api.py b/services/web/server/tests/unit/isolated/test_security_api.py
index f7e736204350..c9d281c6e746 100644
--- a/services/web/server/tests/unit/isolated/test_security_api.py
+++ b/services/web/server/tests/unit/isolated/test_security_api.py
@@ -242,7 +242,7 @@ async def basic_db_funs_mocked(client: TestClient, mocker: MockerFixture) -> Non
await clean_auth_policy_cache(client.app)
mocker.patch(
- "simcore_service_webserver.security._authz_policy.get_database_engine",
+ "simcore_service_webserver.security._authz_policy.get_aiopg_engine",
autospec=True,
)
diff --git a/services/web/server/tests/unit/with_dbs/01/notifications/test_notifications__db_comp_tasks_listening_task.py b/services/web/server/tests/unit/with_dbs/01/notifications/test_notifications__db_comp_tasks_listening_task.py
index dfea0bf713bf..49989b3fa313 100644
--- a/services/web/server/tests/unit/with_dbs/01/notifications/test_notifications__db_comp_tasks_listening_task.py
+++ b/services/web/server/tests/unit/with_dbs/01/notifications/test_notifications__db_comp_tasks_listening_task.py
@@ -17,7 +17,7 @@
from models_library.projects import ProjectAtDB, ProjectID
from pytest_mock.plugin import MockerFixture
from pytest_simcore.helpers.webserver_login import UserInfoDict
-from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY
+from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
from simcore_postgres_database.models.comp_pipeline import StateType
from simcore_postgres_database.models.comp_tasks import NodeClass, comp_tasks
from simcore_postgres_database.models.users import UserRole
@@ -139,7 +139,7 @@ async def test_listen_comp_tasks_task(
task_class: NodeClass,
faker: Faker,
):
- db_engine: aiopg.sa.Engine = client.app[APP_DB_ENGINE_KEY]
+ db_engine: aiopg.sa.Engine = client.app[APP_AIOPG_ENGINE_KEY]
some_project = await project(logged_user)
pipeline(project_id=f"{some_project.uuid}")
task = comp_task(
diff --git a/services/web/server/tests/unit/with_dbs/01/test_db.py b/services/web/server/tests/unit/with_dbs/01/test_db.py
index e4f77f379e0a..7d59fb327f19 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_db.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_db.py
@@ -5,21 +5,74 @@
from pathlib import Path
+import aiopg.sa
+import asyncpg
+import sqlalchemy as sa
import yaml
from aiohttp.test_utils import TestServer
+from simcore_service_webserver.application_settings import (
+ ApplicationSettings,
+ get_application_settings,
+)
from simcore_service_webserver.db.plugin import (
+ get_aiopg_engine,
+ get_asyncpg_engine,
is_service_enabled,
is_service_responsive,
)
+from simcore_service_webserver.login.storage import AsyncpgStorage, get_plugin_storage
+from sqlalchemy.ext.asyncio import AsyncEngine
+
+
+async def test_all_pg_engines_in_app(web_server: TestServer):
+ app = web_server.app
+ assert app
+
+ settings: ApplicationSettings = get_application_settings(app)
+ assert settings.WEBSERVER_DB
+ assert settings.WEBSERVER_DB.POSTGRES_CLIENT_NAME
+
+ # (1) aiopg engine (deprecated)
+ aiopg_engine = get_aiopg_engine(app)
+ assert aiopg_engine
+ assert isinstance(aiopg_engine, aiopg.sa.Engine)
+
+ # (2) asyncpg engine via sqlalchemy.ext.asyncio (new)
+ asyncpg_engine: AsyncEngine = get_asyncpg_engine(app)
+ assert asyncpg_engine
+ assert isinstance(asyncpg_engine, AsyncEngine)
+
+ # (3) low-level asyncpg Pool (deprecated)
+ # Will be replaced by (2)
+ login_storage: AsyncpgStorage = get_plugin_storage(app)
+ assert login_storage.pool
+ assert isinstance(login_storage.pool, asyncpg.Pool)
+
+ # they ALL point to the SAME database
+ assert aiopg_engine.dsn
+ assert asyncpg_engine.url
+
+ query = sa.text('SELECT "version_num" FROM "alembic_version"')
+ async with login_storage.pool.acquire() as conn:
+ result_pool = await conn.fetchval(str(query))
+
+ async with asyncpg_engine.connect() as conn:
+ result_asyncpg = (await conn.execute(query)).scalar_one_or_none()
+
+ async with aiopg_engine.acquire() as conn:
+ result_aiopg = await (await conn.execute(query)).scalar()
+
+ assert result_pool == result_asyncpg
+ assert result_pool == result_aiopg
def test_uses_same_postgres_version(
docker_compose_file: Path, osparc_simcore_root_dir: Path
):
- with open(docker_compose_file) as fh:
+ with Path.open(docker_compose_file) as fh:
fixture = yaml.safe_load(fh)
- with open(osparc_simcore_root_dir / "services" / "docker-compose.yml") as fh:
+ with Path.open(osparc_simcore_root_dir / "services" / "docker-compose.yml") as fh:
expected = yaml.safe_load(fh)
assert (
diff --git a/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py b/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py
index 7425b466206a..b2fc82f44e69 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py
@@ -7,7 +7,7 @@
import pytest
import sqlalchemy as sa
from servicelib.common_aiopg_utils import DataSourceName, create_pg_engine
-from simcore_service_webserver._constants import APP_DB_ENGINE_KEY
+from simcore_service_webserver._constants import APP_AIOPG_ENGINE_KEY
from simcore_service_webserver.groups._classifiers import GroupClassifierRepository
from sqlalchemy.sql import text
@@ -35,7 +35,7 @@ async def app(postgres_dsn: dict, inject_tables):
)
async with create_pg_engine(dsn) as engine:
- fake_app = {APP_DB_ENGINE_KEY: engine}
+ fake_app = {APP_AIOPG_ENGINE_KEY: engine}
yield fake_app
diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py
index f4bfac46564d..20cb885bdfa3 100644
--- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py
+++ b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py
@@ -23,7 +23,7 @@
)
from servicelib.aiohttp import status
from simcore_postgres_database.models.projects import projects
-from simcore_service_webserver._constants import APP_DB_ENGINE_KEY
+from simcore_service_webserver._constants import APP_AIOPG_ENGINE_KEY
from simcore_service_webserver.director_v2.api import get_project_run_policy
from simcore_service_webserver.meta_modeling._handlers import (
Page,
@@ -62,7 +62,7 @@ async def context_with_logged_user(client: TestClient, logged_user: UserInfoDict
yield
assert client.app
- engine = client.app[APP_DB_ENGINE_KEY]
+ engine = client.app[APP_AIOPG_ENGINE_KEY]
async with engine.acquire() as conn:
# cascade deletes everything except projects_vc_snapshot
await conn.execute(projects.delete())
diff --git a/services/web/server/tests/unit/with_dbs/03/products/test_products_db.py b/services/web/server/tests/unit/with_dbs/03/products/test_products_db.py
index 34b1165a7fb7..06022c2f9fa0 100644
--- a/services/web/server/tests/unit/with_dbs/03/products/test_products_db.py
+++ b/services/web/server/tests/unit/with_dbs/03/products/test_products_db.py
@@ -22,7 +22,7 @@
WebFeedback,
products,
)
-from simcore_service_webserver.db.plugin import APP_DB_ENGINE_KEY
+from simcore_service_webserver.db.plugin import APP_AIOPG_ENGINE_KEY
from simcore_service_webserver.products._db import ProductRepository
from simcore_service_webserver.products._middlewares import (
_get_app_default_product_name,
@@ -42,7 +42,7 @@ async def product_row(app: web.Application, product_data: dict[str, Any]) -> Row
Note that product_data is a SUBSET of product_row (e.g. modified dattimes etc)!
"""
- engine = app[APP_DB_ENGINE_KEY]
+ engine = app[APP_AIOPG_ENGINE_KEY]
assert engine
async with engine.acquire() as conn:
@@ -89,7 +89,6 @@ async def product_repository(
url="https://acme.com",
license_url="http://docs.acme.app/#/license-terms",
invitation_url="http://docs.acme.app/#/how-to-request-invitation",
- has_landing_page=False,
),
"issues": [
IssueTracker(
diff --git a/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py b/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py
index 710125717369..cf441325a6fd 100644
--- a/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py
+++ b/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py
@@ -5,7 +5,6 @@
from collections.abc import AsyncIterator, Callable, Iterator
-from http import HTTPStatus
from typing import Any
import pytest
@@ -27,7 +26,7 @@
from servicelib.aiohttp import status
from simcore_postgres_database.models.tags import tags
from simcore_service_webserver.db.models import UserRole
-from simcore_service_webserver.db.plugin import get_database_engine
+from simcore_service_webserver.db.plugin import get_aiopg_engine
from simcore_service_webserver.projects.models import ProjectDict
@@ -46,12 +45,16 @@ def fake_tags(faker: Faker) -> list[dict[str, Any]]:
]
-@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)])
+@pytest.fixture
+def user_role() -> UserRole:
+ # All tests in test_tags assume the USER role,
+ # i.e. it is used by the `logged_user` and `user_project` fixtures
+ return UserRole.USER
+
+
async def test_tags_to_studies(
client: TestClient,
- logged_user: UserInfoDict,
user_project: ProjectDict,
- expected: HTTPStatus,
fake_tags: dict[str, Any],
catalog_subsystem_mock: Callable[[list[ProjectDict]], None],
):
@@ -64,7 +67,7 @@ async def test_tags_to_studies(
for tag in fake_tags:
url = client.app.router["create_tag"].url_for()
resp = await client.post(f"{url}", json=tag)
- added_tag, _ = await assert_status(resp, expected)
+ added_tag, _ = await assert_status(resp, status.HTTP_200_OK)
added_tags.append(added_tag)
# Add tag to study
@@ -72,7 +75,7 @@ async def test_tags_to_studies(
project_uuid=user_project.get("uuid"), tag_id=str(added_tag.get("id"))
)
resp = await client.post(f"{url}")
- data, _ = await assert_status(resp, expected)
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
# Tag is included in response
assert added_tag["id"] in data["tags"]
@@ -86,7 +89,7 @@ async def test_tags_to_studies(
),
exclude_unset=True,
)
- data = await assert_get_same_project(client, user_project, expected)
+ data = await assert_get_same_project(client, user_project, status.HTTP_200_OK)
# Delete tag0
url = client.app.router["delete_tag"].url_for(tag_id=str(added_tags[0].get("id")))
@@ -95,7 +98,7 @@ async def test_tags_to_studies(
# Get project and check that tag is no longer there
user_project["tags"].remove(added_tags[0]["id"])
- data = await assert_get_same_project(client, user_project, expected)
+ data = await assert_get_same_project(client, user_project, status.HTTP_200_OK)
assert added_tags[0].get("id") not in data.get("tags")
# Remove tag1 from project
@@ -103,11 +106,11 @@ async def test_tags_to_studies(
project_uuid=user_project.get("uuid"), tag_id=str(added_tags[1].get("id"))
)
resp = await client.post(f"{url}")
- await assert_status(resp, expected)
+ await assert_status(resp, status.HTTP_200_OK)
# Get project and check that tag is no longer there
user_project["tags"].remove(added_tags[1]["id"])
- data = await assert_get_same_project(client, user_project, expected)
+ data = await assert_get_same_project(client, user_project, status.HTTP_200_OK)
assert added_tags[1].get("id") not in data.get("tags")
# Delete tag1
@@ -119,7 +122,7 @@ async def test_tags_to_studies(
@pytest.fixture
async def everybody_tag_id(client: TestClient) -> AsyncIterator[int]:
assert client.app
- engine = get_database_engine(client.app)
+ engine = get_aiopg_engine(client.app)
assert engine
async with engine.acquire() as conn:
@@ -139,11 +142,6 @@ async def everybody_tag_id(client: TestClient) -> AsyncIterator[int]:
await delete_tag(conn, tag_id=tag_id)
-@pytest.fixture
-def user_role() -> UserRole:
- return UserRole.USER
-
-
async def test_read_tags(
client: TestClient,
logged_user: UserInfoDict,
diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py b/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py
index 315f4884bc03..3b8284e14253 100644
--- a/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py
+++ b/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py
@@ -23,7 +23,7 @@
)
from servicelib.aiohttp import status
from simcore_postgres_database.models.users import UserRole
-from simcore_service_webserver.db.plugin import get_database_engine
+from simcore_service_webserver.db.plugin import get_aiopg_engine
@pytest.fixture
@@ -46,7 +46,7 @@ async def tokens_db_cleanup(
client: TestClient,
) -> AsyncIterator[None]:
assert client.app
- engine = get_database_engine(client.app)
+ engine = get_aiopg_engine(client.app)
yield None
@@ -73,7 +73,7 @@ async def fake_tokens(
"token_secret": faker.md5(raw_output=False),
}
await create_token_in_db(
- get_database_engine(client.app),
+ get_aiopg_engine(client.app),
user_id=logged_user["id"],
token_service=data["service"],
token_data=data,
@@ -113,7 +113,7 @@ async def test_create_token(
data, error = await assert_status(resp, expected)
if not error:
db_token = await get_token_from_db(
- get_database_engine(client.app), token_data=token
+ get_aiopg_engine(client.app), token_data=token
)
assert db_token
assert db_token["token_data"] == token
@@ -187,5 +187,5 @@ async def test_delete_token(
if not error:
assert not (
- await get_token_from_db(get_database_engine(client.app), token_service=sid)
+ await get_token_from_db(get_aiopg_engine(client.app), token_service=sid)
)
diff --git a/services/web/server/tests/unit/with_dbs/03/test_users_api.py b/services/web/server/tests/unit/with_dbs/03/test_users_api.py
index 28b70592ce8b..89b5ddea4747 100644
--- a/services/web/server/tests/unit/with_dbs/03/test_users_api.py
+++ b/services/web/server/tests/unit/with_dbs/03/test_users_api.py
@@ -11,7 +11,7 @@
from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
from pytest_simcore.helpers.webserver_login import NewUser
from servicelib.aiohttp import status
-from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY
+from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
from simcore_postgres_database.models.users import UserStatus
from simcore_service_webserver.users.api import (
get_user_name_and_email,
@@ -67,7 +67,7 @@ async def _rq_login():
await assert_status(r1, status.HTTP_200_OK)
# apply update
- expired = await update_expired_users(client.app[APP_DB_ENGINE_KEY])
+ expired = await update_expired_users(client.app[APP_AIOPG_ENGINE_KEY])
if has_expired:
assert expired == [user["id"]]
else:
diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py
index 64c0052efd65..7343176760e8 100644
--- a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py
@@ -30,7 +30,7 @@
)
from simcore_service_webserver._meta import API_VTAG as VX
from simcore_service_webserver.db.models import UserRole
-from simcore_service_webserver.db.plugin import APP_DB_ENGINE_KEY
+from simcore_service_webserver.db.plugin import APP_AIOPG_ENGINE_KEY
from simcore_service_webserver.log import setup_logging
from simcore_service_webserver.projects.models import ProjectDict
from tenacity.asyncio import AsyncRetrying
@@ -159,7 +159,7 @@ async def user_project(
# cleanup repos
assert client.app
- engine = client.app[APP_DB_ENGINE_KEY]
+ engine = client.app[APP_AIOPG_ENGINE_KEY]
async with engine.acquire() as conn:
# cascade deletes everything except projects_vc_snapshot
await conn.execute(projects_vc_repos.delete())
diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py
index ba3bed4b5938..fee6b9352e1b 100644
--- a/services/web/server/tests/unit/with_dbs/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/conftest.py
@@ -41,12 +41,13 @@
from pydantic import ByteSize, parse_obj_as
from pytest_mock import MockerFixture
from pytest_simcore.helpers.dict_tools import ConfigDict
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
from pytest_simcore.helpers.typing_env import EnvVarsDict
from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict
from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem
from pytest_simcore.helpers.webserver_projects import NewProject
from redis import Redis
-from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY
+from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
from servicelib.aiohttp.long_running_tasks.client import LRTask
from servicelib.aiohttp.long_running_tasks.server import ProgressPercent, TaskProgress
from servicelib.common_aiopg_utils import DSN
@@ -58,7 +59,7 @@
from simcore_postgres_database.utils_products import get_default_product_name
from simcore_service_webserver._constants import INDEX_RESOURCE_NAME
from simcore_service_webserver.application import create_application
-from simcore_service_webserver.db.plugin import get_database_engine
+from simcore_service_webserver.db.plugin import get_aiopg_engine
from simcore_service_webserver.groups.api import (
add_user_in_group,
create_user_group,
@@ -127,6 +128,7 @@ def app_cfg(default_app_cfg: ConfigDict, unused_tcp_port_factory) -> ConfigDict:
def app_environment(
app_cfg: ConfigDict,
monkeypatch_setenv_from_app_config: Callable[[ConfigDict], dict[str, str]],
+ monkeypatch: pytest.MonkeyPatch,
) -> EnvVarsDict:
# WARNING: this fixture is commonly overriden. Check before renaming.
"""overridable fixture that defines the ENV for the webserver application
@@ -140,7 +142,12 @@ def app_environment(app_environment: dict[str, str], monkeypatch: pytest.MonkeyP
"""
print("+ web_server:")
cfg = deepcopy(app_cfg)
- return monkeypatch_setenv_from_app_config(cfg)
+
+ envs = monkeypatch_setenv_from_app_config(cfg)
+ # NOTE: this emulates hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" in docker-compose,
+ # which affects PostgresSettings.POSTGRES_CLIENT_NAME
+ extra = setenvs_from_dict(monkeypatch, {"HOSTNAME": "webserver_test_host.0"})
+ return envs | extra
@pytest.fixture
@@ -193,7 +200,7 @@ def web_server(
assert isinstance(postgres_db, sa.engine.Engine)
- pg_settings = dict(e.split("=") for e in app[APP_DB_ENGINE_KEY].dsn.split())
+ pg_settings = dict(e.split("=") for e in app[APP_AIOPG_ENGINE_KEY].dsn.split())
assert pg_settings["host"] == postgres_db.url.host
assert int(pg_settings["port"]) == postgres_db.url.port
assert pg_settings["user"] == postgres_db.url.username
@@ -225,7 +232,7 @@ def osparc_product_name() -> str:
@pytest.fixture
async def default_product_name(client: TestClient) -> ProductName:
assert client.app
- async with get_database_engine(client.app).acquire() as conn:
+ async with get_aiopg_engine(client.app).acquire() as conn:
return await get_default_product_name(conn)