
Commit 45be582

Merge branch 'master' into fix/debt-msg
2 parents: b2c0daa + e042a69

24 files changed: +255 −181 lines changed

.github/mergify.yml

Lines changed: 46 additions & 0 deletions

@@ -0,0 +1,46 @@
+pull_request_rules:
+  - name: Automatic merge
+    description: Conditions required for automatic merging of a PR
+    conditions:
+      # let Mergify know that the PR can be merged (added manually)
+      - label = 🤖-automerge
+      # block Mergify from merging the PR (added manually)
+      - label != 🤖-do-not-merge
+
+      # list of CI checks that need to pass in order for the PR to be merged
+      - check-success=unit-tests
+      - check-success=integration-tests
+      - check-success=system-tests
+
+      # Check for required reviews
+      - "#approved-reviews-by>=2" # Requires 2 approving reviews
+      - "#changes-requested-reviews-by=0" # No changes requested
+      - "#review-threads-unresolved=0" # All review threads resolved
+
+      # Optional but recommended checks
+      - base=master
+      - -draft # PR is not in draft state
+      - -conflict # No merge conflicts
+    actions:
+      merge:
+
+  - name: automatic update for PR ready to be merged
+    conditions:
+      - -draft
+      - label=🤖-automerge
+    actions:
+      update:
+
+  - name: retry CI on failure
+    conditions:
+      - base=master
+      - or:
+          - check-failure=unit-tests
+          - check-failure=integration-tests
+          - check-failure=system-tests
+    actions:
+      github_actions:
+        workflow:
+          dispatch:
+            - workflow: .github/workflows/ci-testing-deploy.yml
+
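The new Mergify rules auto-merge labelled PRs once the three CI checks and the review requirements pass, keep approved PRs up to date with master, and re-dispatch the CI workflow when a check fails. As a quick local sanity check of the rule structure, a small script along these lines could parse the file and list each rule's conditions (a sketch only; it assumes PyYAML is installed and is not part of this commit):

# Sketch: parse .github/mergify.yml and print each rule with its conditions.
# Assumes PyYAML is available (pip install pyyaml); not part of this commit.
from pathlib import Path

import yaml

config = yaml.safe_load(Path(".github/mergify.yml").read_text())
for rule in config["pull_request_rules"]:
    print(f"Rule: {rule['name']}")
    for condition in rule.get("conditions", []):
        print(f"  - {condition}")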

.ruff.toml

Lines changed: 5 additions & 5 deletions

@@ -1,5 +1,5 @@
 
-select = [
+lint.select = [
     "A",     # [https://pypi.org/project/flake8-builtins/]
     "ARG",   # [https://pypi.org/project/flake8-unused-arguments/]
     "ASYNC", # [https://pypi.org/project/flake8-async/]
@@ -40,7 +40,7 @@ select = [
     "W",   # [https://pypi.org/project/pycodestyle/] warnings
     "YTT", # [https://pypi.org/project/flake8-2020/]
 ]
-ignore = [
+lint.ignore = [
     "E501",   # line too long, handled by black
     "S101",   # use of `assert` detected hanbled by pylance, does not support noseq
     "TID252", # [*] Relative imports from parent modules are banned
@@ -50,7 +50,7 @@ ignore = [
 target-version = "py311"
 
 
-[per-file-ignores]
+[lint.per-file-ignores]
 "**/{tests,pytest_simcore}/**" = [
     "T201",   # print found
     "ARG001", # unused function argument
@@ -64,10 +64,10 @@ target-version = "py311"
     "FBT001", # Boolean positional arg in function definition
 ]
 
-[flake8-pytest-style]
+[lint.flake8-pytest-style]
 fixture-parentheses = false
 parametrize-names-type = "csv"
 
 
-[pylint]
+[lint.pylint]
 max-args = 10
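These renames follow Ruff's newer configuration layout, where linter-specific settings live under the lint namespace (lint.select, lint.ignore, [lint.per-file-ignores], and so on) while top-level keys such as target-version stay where they are. A quick check that the file still parses and that the keys landed under lint could look like this (a sketch; it assumes Python 3.11+ for the stdlib tomllib module and is not part of this commit):

# Sketch: confirm .ruff.toml parses and the linter settings sit under the `lint` table.
# Assumes Python 3.11+ (stdlib tomllib); not part of this commit.
import tomllib
from pathlib import Path

with Path(".ruff.toml").open("rb") as f:
    config = tomllib.load(f)

assert "select" in config["lint"] and "ignore" in config["lint"]
assert "per-file-ignores" in config["lint"]
print("target-version:", config["target-version"])
print("lint keys:", sorted(config["lint"]))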

packages/postgres-database/tests/conftest.py

Lines changed: 26 additions & 42 deletions

@@ -5,19 +5,21 @@
 
 import uuid
 import warnings
-from collections.abc import AsyncIterator, Awaitable, Callable, Iterator
+from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Iterator
 from pathlib import Path
 
 import aiopg.sa
 import aiopg.sa.exc
 import pytest
 import simcore_postgres_database.cli
 import sqlalchemy as sa
+import sqlalchemy.engine
 import yaml
 from aiopg.sa.connection import SAConnection
 from aiopg.sa.engine import Engine
 from aiopg.sa.result import ResultProxy, RowProxy
 from faker import Faker
+from pytest_simcore.helpers import postgres_tools
 from pytest_simcore.helpers.faker_factories import (
     random_group,
     random_project,
@@ -71,20 +73,15 @@ def postgres_service(docker_services, docker_ip, docker_compose_file) -> str:
     return dsn
 
 
-@pytest.fixture
-def make_engine(
-    postgres_service: str,
-) -> Callable[[bool], Awaitable[Engine] | sa.engine.base.Engine]:
-    dsn = postgres_service
-
-    def _make(is_async=True) -> Awaitable[Engine] | sa.engine.base.Engine:
-        return aiopg.sa.create_engine(dsn) if is_async else sa.create_engine(dsn)
-
-    return _make
+@pytest.fixture(scope="session")
+def sync_engine(postgres_service: str) -> Iterable[sqlalchemy.engine.Engine]:
+    _engine: sqlalchemy.engine.Engine = sa.create_engine(url=postgres_service)
+    yield _engine
+    _engine.dispose()
 
 
 @pytest.fixture
-def make_asyncpg_engine(postgres_service: str) -> Callable[[bool], AsyncEngine]:
+def _make_asyncpg_engine(postgres_service: str) -> Callable[[bool], AsyncEngine]:
     # NOTE: users is responsible of `await engine.dispose()`
     dsn = postgres_service.replace("postgresql://", "postgresql+asyncpg://")
     minsize = 1
@@ -127,10 +124,10 @@ def db_metadata() -> sa.MetaData:
 
 @pytest.fixture(params=["sqlModels", "alembicMigration"])
 def pg_sa_engine(
-    make_engine: Callable,
+    sync_engine: sqlalchemy.engine.Engine,
     db_metadata: sa.MetaData,
     request: pytest.FixtureRequest,
-) -> Iterator[sa.engine.Engine]:
+) -> Iterator[sqlalchemy.engine.Engine]:
     """
     Runs migration to create tables and return a sqlalchemy engine
 
@@ -144,7 +141,6 @@
     # the tables, i.e. when no migration mechanism are in place
     # Best is therefore to start from scratch and delete all at
     # the end
-    sync_engine = make_engine(is_async=False)
 
     # NOTE: ALL is deleted before
     db_metadata.drop_all(sync_engine)
@@ -165,22 +161,20 @@
 
     yield sync_engine
 
-    # NOTE: ALL is deleted after
-    with sync_engine.begin() as conn:
-        conn.execute(sa.DDL("DROP TABLE IF EXISTS alembic_version"))
-    db_metadata.drop_all(sync_engine)
-    sync_engine.dispose()
+    postgres_tools.force_drop_all_tables(sync_engine)
 
 
 @pytest.fixture
 async def aiopg_engine(
-    pg_sa_engine: sa.engine.Engine, make_engine: Callable
+    pg_sa_engine: sqlalchemy.engine.Engine,
+    postgres_service: str,
 ) -> AsyncIterator[Engine]:
     """
     Return an aiopg.sa engine connected to a responsive and migrated pg database
     """
-
-    aiopg_sa_engine = await make_engine(is_async=True)
+    # first start sync
+    assert pg_sa_engine.url.database
+    assert postgres_service.endswith(pg_sa_engine.url.database)
 
     warnings.warn(
         "The 'aiopg_engine' is deprecated since we are replacing `aiopg` library by `sqlalchemy.ext.asyncio`."
@@ -190,12 +184,8 @@ async def aiopg_engine(
         stacklevel=2,
     )
 
-    yield aiopg_sa_engine
-
-    # closes async-engine connections and terminates
-    aiopg_sa_engine.close()
-    await aiopg_sa_engine.wait_closed()
-    aiopg_sa_engine.terminate()
+    async with aiopg.sa.create_engine(dsn=postgres_service) as aiopg_sa_engine:
+        yield aiopg_sa_engine
 
 
 @pytest.fixture
@@ -208,15 +198,15 @@ async def connection(aiopg_engine: Engine) -> AsyncIterator[SAConnection]:
 @pytest.fixture
 async def asyncpg_engine(  # <-- WE SHOULD USE THIS ONE
     is_pdb_enabled: bool,
-    pg_sa_engine: sa.engine.Engine,
-    make_asyncpg_engine: Callable[[bool], AsyncEngine],
+    pg_sa_engine: sqlalchemy.engine.Engine,
+    _make_asyncpg_engine: Callable[[bool], AsyncEngine],
 ) -> AsyncIterator[AsyncEngine]:
 
     assert (
         pg_sa_engine
     ), "Ensures pg db up, responsive, init (w/ tables) and/or migrated"
 
-    _apg_engine = make_asyncpg_engine(is_pdb_enabled)
+    _apg_engine = _make_asyncpg_engine(is_pdb_enabled)
 
     yield _apg_engine
 
@@ -229,9 +219,7 @@ async def asyncpg_engine(  # <-- WE SHOULD USE THIS ONE
 
 
 @pytest.fixture
-def create_fake_group(
-    make_engine: Callable[..., Awaitable[Engine] | sa.engine.base.Engine]
-) -> Iterator[Callable]:
+def create_fake_group(sync_engine: sqlalchemy.engine.Engine) -> Iterator[Callable]:
    """factory to create standard group"""
    created_ids = []
 
@@ -250,16 +238,13 @@ async def _creator(conn: SAConnection, **overrides) -> RowProxy:
 
     yield _creator
 
-    sync_engine = make_engine(is_async=False)
-    assert isinstance(sync_engine, sa.engine.Engine)
+    assert isinstance(sync_engine, sqlalchemy.engine.Engine)
     with sync_engine.begin() as conn:
         conn.execute(sa.delete(groups).where(groups.c.gid.in_(created_ids)))
 
 
 @pytest.fixture
-def create_fake_user(
-    make_engine: Callable[..., Awaitable[Engine] | sa.engine.base.Engine]
-) -> Iterator[Callable]:
+def create_fake_user(sync_engine: sqlalchemy.engine.Engine) -> Iterator[Callable]:
     """factory to create a user w/ or w/o a standard group"""
 
     created_ids = []
@@ -290,8 +275,7 @@ async def _creator(conn, group: RowProxy | None = None, **overrides) -> RowProxy
 
     yield _creator
 
-    sync_engine = make_engine(is_async=False)
-    assert isinstance(sync_engine, sa.engine.Engine)
+    assert isinstance(sync_engine, sqlalchemy.engine.Engine)
     with sync_engine.begin() as conn:
         conn.execute(users.delete().where(users.c.id.in_(created_ids)))
 
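The old make_engine factory is replaced by a session-scoped sync_engine fixture plus an aiopg.sa.create_engine context manager, and the inline teardown that dropped alembic_version and all metadata tables is delegated to postgres_tools.force_drop_all_tables. The helper's body is not part of this diff; judging from the code it replaces, it plausibly does something along these lines (hypothetical sketch, not the actual pytest_simcore implementation):

# Hypothetical sketch of a force-drop helper, modeled on the teardown it replaces.
# The real postgres_tools.force_drop_all_tables may differ, e.g. it could take the
# tables from a metadata object instead of reflecting them from the database.
import sqlalchemy as sa


def force_drop_all_tables(sync_engine: sa.engine.Engine) -> None:
    # Reflect whatever tables currently exist, then drop them together with
    # the alembic bookkeeping table.
    metadata = sa.MetaData()
    metadata.reflect(bind=sync_engine)
    with sync_engine.begin() as conn:
        conn.execute(sa.DDL("DROP TABLE IF EXISTS alembic_version"))
    metadata.drop_all(bind=sync_engine)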

packages/postgres-database/tests/test_models_groups.py

Lines changed: 1 addition & 20 deletions

@@ -3,18 +3,14 @@
 # pylint: disable=unused-variable
 # pylint: disable=too-many-arguments
 
-
-from collections.abc import AsyncIterator, Awaitable, Callable
+from collections.abc import Callable
 
 import aiopg.sa.exc
 import pytest
-import sqlalchemy as sa
 from aiopg.sa.connection import SAConnection
-from aiopg.sa.engine import Engine
 from aiopg.sa.result import ResultProxy, RowProxy
 from psycopg2.errors import ForeignKeyViolation, RaiseException, UniqueViolation
 from pytest_simcore.helpers.faker_factories import random_user
-from simcore_postgres_database.models.base import metadata
 from simcore_postgres_database.webserver_models import (
     GroupType,
     groups,
@@ -24,21 +20,6 @@
 from sqlalchemy import func, literal_column, select
 
 
-@pytest.fixture
-async def connection(
-    make_engine: Callable[[bool], Awaitable[Engine] | sa.engine.base.Engine]
-) -> AsyncIterator[SAConnection]:
-    engine = await make_engine()
-    sync_engine = make_engine(is_async=False)
-    metadata.drop_all(sync_engine)
-    metadata.create_all(sync_engine)
-
-    async with engine.acquire() as conn:
-        yield conn
-
-    metadata.drop_all(sync_engine)
-
-
 async def test_user_group_uniqueness(
     connection: SAConnection,
     create_fake_group: Callable,
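With the local fixture removed, these tests rely on the connection fixture that conftest.py provides on top of aiopg_engine, which already guarantees a migrated database. A minimal test using the shared fixture would look roughly like this (illustrative only; the test name is hypothetical):

# Illustrative sketch: a test leaning on the shared `connection` fixture from conftest.py.
from aiopg.sa.connection import SAConnection
from sqlalchemy import func, select

from simcore_postgres_database.webserver_models import groups


async def test_groups_table_is_queryable(connection: SAConnection):
    # The shared fixture hands out a connection to an already-migrated database.
    count = await connection.scalar(select(func.count()).select_from(groups))
    assert count is not None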

packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py

Lines changed: 17 additions & 11 deletions

@@ -1,13 +1,18 @@
-# pylint:disable=no-value-for-parameter
-# pylint:disable=unused-variable
-# pylint:disable=unused-argument
-# pylint:disable=redefined-outer-name
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
 
 import json
+from collections.abc import AsyncIterator
 
+import aiopg.sa.engine
+import aiopg.sa.exc
 import pytest
 import sqlalchemy as sa
+import sqlalchemy.engine
 from psycopg2.errors import UniqueViolation  # pylint: disable=no-name-in-module
+from pytest_simcore.helpers import postgres_tools
 from pytest_simcore.helpers.faker_factories import fake_pipeline, fake_task_factory
 from simcore_postgres_database.models.base import metadata
 from simcore_postgres_database.webserver_models import comp_pipeline, comp_tasks
@@ -16,24 +21,25 @@
 
 
 @pytest.fixture
-async def engine(make_engine):
-    engine = await make_engine()
-    sync_engine = make_engine(is_async=False)
+async def engine(
+    sync_engine: sqlalchemy.engine.Engine,
+    aiopg_engine: aiopg.sa.engine.Engine,
+) -> AsyncIterator[aiopg.sa.engine.Engine]:
+
     metadata.drop_all(sync_engine)
     metadata.create_all(sync_engine)
 
-    async with engine.acquire() as conn:
+    async with aiopg_engine.acquire() as conn:
         await conn.execute(
             comp_pipeline.insert().values(**fake_pipeline(project_id="PA"))
         )
         await conn.execute(
             comp_pipeline.insert().values(**fake_pipeline(project_id="PB"))
         )
 
-    yield engine
+    yield aiopg_engine
 
-    engine.close()
-    await engine.wait_closed()
+    postgres_tools.force_drop_all_tables(sync_engine)
 
 
 async def test_unique_project_node_pairs(engine):
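The rewritten engine fixture now composes the shared sync_engine and aiopg_engine fixtures and hands tests an aiopg engine whose database is pre-seeded with the two fake pipelines. Consuming it follows the usual acquire/execute pattern (illustrative sketch; the test name is hypothetical):

# Illustrative sketch: consuming the `engine` fixture defined above.
import sqlalchemy as sa

from simcore_postgres_database.webserver_models import comp_pipeline


async def test_pipelines_were_seeded(engine):
    # The fixture inserted pipelines "PA" and "PB" before yielding the engine.
    async with engine.acquire() as conn:
        result = await conn.execute(sa.select(comp_pipeline.c.project_id))
        rows = await result.fetchall()
    assert {"PA", "PB"} <= {row["project_id"] for row in rows}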

packages/postgres-database/tests/test_utils_migration.py

Lines changed: 3 additions & 3 deletions

@@ -3,10 +3,10 @@
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
 
-from collections.abc import Callable
 
 import pytest
 import simcore_postgres_database.cli
+import sqlalchemy.engine
 from alembic.script.revision import MultipleHeads
 from simcore_postgres_database.utils_migration import get_current_head
 from sqlalchemy import inspect
@@ -23,8 +23,8 @@ def test_migration_has_no_branches():
     )
 
 
-def test_migration_upgrade_downgrade(make_engine: Callable):
-    sync_engine = make_engine(is_async=False)
+def test_migration_upgrade_downgrade(sync_engine: sqlalchemy.engine.Engine):
+
     assert sync_engine
     assert simcore_postgres_database.cli.discover.callback
     assert simcore_postgres_database.cli.upgrade.callback
