Commit c5a12a0

Merge remote-tracking branch 'upstream/master' into is5646/use-project-nodes-table-instead-of-workbench
2 parents: 6e17b80 + 3472e17

File tree

79 files changed: +1319 −195 lines

Some content is hidden: large commits have some content hidden by default, so a few file names below are not shown.


packages/models-library/src/models_library/api_schemas_webserver/projects.py

Lines changed: 1 addition & 0 deletions

@@ -274,6 +274,7 @@ class ProjectPatch(InputSchema):
     ] = None
     quality: dict[str, Any] | None = None
     template_type: ProjectTemplateType | None = None
+    hidden: bool | None = None

     def to_domain_model(self) -> dict[str, Any]:
         return self.model_dump(exclude_unset=True, by_alias=False)
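Because to_domain_model dumps with exclude_unset=True, only fields the caller explicitly set reach the domain layer, so the new optional hidden field patches cleanly without resetting the others. A minimal sketch (import path as in the diff; assumes the schema accepts field names directly):

    from models_library.api_schemas_webserver.projects import ProjectPatch

    # An unset optional field means "leave unchanged", not "reset to None":
    patch = ProjectPatch(hidden=True)
    assert patch.to_domain_model() == {"hidden": True}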

packages/models-library/src/models_library/rpc/webserver/projects.py

Lines changed: 4 additions & 0 deletions

@@ -90,6 +90,7 @@ class ProjectJobRpcGet(BaseModel):

     # Specific to jobs
     job_parent_resource_name: str
+    storage_assets_deleted: bool

     @staticmethod
     def _update_json_schema_extra(schema: JsonDict) -> None:
@@ -105,6 +106,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None:
                     "created_at": "2023-01-01T00:00:00Z",
                     "modified_at": "2023-01-01T00:00:00Z",
                     "job_parent_resource_name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.2",
+                    "storage_assets_deleted": "false",
                 },
                 {
                     "uuid": "00000000-1234-5678-1234-123456789012",
@@ -114,6 +116,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None:
                     "created_at": "2023-02-01T00:00:00Z",
                     "modified_at": "2023-02-01T00:00:00Z",
                     "job_parent_resource_name": "studies/96642f2a-a72c-11ef-8776-02420a00087d",
+                    "storage_assets_deleted": "true",
                 },
                 {
                     "uuid": "00000000-0000-5678-1234-123456789012",
@@ -123,6 +126,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None:
                     "created_at": "2023-03-01T00:00:00Z",
                     "modified_at": "2023-03-01T00:00:00Z",
                     "job_parent_resource_name": "program/simcore%2Fservices%2Fdynamic%2Fjupyter/releases/5.0.2",
+                    "storage_assets_deleted": "false",
                 },
             ]
         }
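Note that the schema examples encode the new flag as the strings "false"/"true"; pydantic v2 coerces these to proper booleans in lax mode, so the examples still validate against the bool field. A quick sketch:

    from models_library.rpc.webserver.projects import ProjectJobRpcGet

    example = ProjectJobRpcGet.model_json_schema()["examples"][0]
    model = ProjectJobRpcGet.model_validate(example)
    assert model.storage_assets_deleted is False  # "false" coerced to bool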
(file name hidden in the commit view; a new Alembic migration, revision 5b998370916a)

Lines changed: 40 additions & 0 deletions

@@ -0,0 +1,40 @@
+"""introduce data_deleted in projects_to_jobs table
+
+Revision ID: 5b998370916a
+Revises: 5679165336c8
+Create Date: 2025-08-11 13:58:38.424398+00:00
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "5b998370916a"
+down_revision = "5679165336c8"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column(
+        "projects_to_jobs",
+        sa.Column("storage_assets_deleted", sa.Boolean(), nullable=True),
+    )
+
+    op.execute("UPDATE projects_to_jobs SET storage_assets_deleted = false")
+
+    op.alter_column(
+        "projects_to_jobs",
+        "storage_assets_deleted",
+        existing_type=sa.BOOLEAN(),
+        nullable=False,
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("projects_to_jobs", "storage_assets_deleted")
+    # ### end Alembic commands ###
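The upgrade uses the usual three-step pattern for adding a NOT NULL column to a table that may already hold rows: add the column as nullable, backfill existing rows with false, then tighten to nullable=False; adding the column as NOT NULL directly would fail on pre-existing rows. For comparison, a hedged sketch of a one-step alternative that leans on a server default (not what this commit does):

    import sqlalchemy as sa
    from alembic import op


    def upgrade():
        # One-step alternative: a server_default backfills existing rows in place
        op.add_column(
            "projects_to_jobs",
            sa.Column(
                "storage_assets_deleted",
                sa.Boolean(),
                nullable=False,
                server_default=sa.false(),
            ),
        )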

packages/postgres-database/src/simcore_postgres_database/models/projects_to_jobs.py

Lines changed: 6 additions & 0 deletions
@@ -28,6 +28,12 @@
         "the relative resource name is shelves/shelf1/jobs/job2, "
         "the parent resource name is shelves/shelf1.",
     ),
+    sa.Column(
+        "storage_assets_deleted",
+        sa.Boolean,
+        nullable=False,
+        doc="Indicates whether the job's S3 assets have been actively deleted.",
+    ),
     # Composite key (project_uuid, job_parent_resource_name) uniquely identifies very row
     sa.UniqueConstraint(
         "project_uuid",
packages/pytest-simcore/src/pytest_simcore/helpers/webserver_rpc_server.py

Lines changed: 32 additions & 0 deletions
@@ -26,6 +26,14 @@
 class WebserverRpcSideEffects:
     # pylint: disable=no-self-use

+    def __init__(
+        self,
+        project_job_rpc_get: ProjectJobRpcGet = ProjectJobRpcGet.model_validate(
+            ProjectJobRpcGet.model_json_schema()["examples"][0]
+        ),
+    ):
+        self.project_job_rpc_get = project_job_rpc_get
+
     @validate_call(config={"arbitrary_types_allowed": True})
     async def mark_project_as_job(
         self,
@@ -35,12 +43,14 @@ async def mark_project_as_job(
         user_id: UserID,
         project_uuid: ProjectID,
         job_parent_resource_name: str,
+        storage_assets_deleted: bool,
     ) -> None:
         assert rpc_client

         assert not job_parent_resource_name.startswith("/")  # nosec
         assert "/" in job_parent_resource_name  # nosec
         assert not job_parent_resource_name.endswith("/")  # nosec
+        assert isinstance(storage_assets_deleted, bool)

         assert product_name
         assert user_id
@@ -84,3 +94,25 @@ async def list_projects_marked_as_jobs(
             limit=limit,
             offset=offset,
         )
+
+    @validate_call(config={"arbitrary_types_allowed": True})
+    async def get_project_marked_as_job(
+        self,
+        rpc_client: RabbitMQRPCClient | MockType,
+        *,
+        product_name: ProductName,
+        user_id: UserID,
+        project_uuid: ProjectID,
+        job_parent_resource_name: str,
+    ) -> ProjectJobRpcGet:
+        assert rpc_client
+        assert product_name
+        assert user_id
+        assert project_uuid
+        assert job_parent_resource_name
+
+        # Return a valid example from the schema
+        _data = self.project_job_rpc_get.model_dump()
+        _data["uuid"] = str(project_uuid)
+        _data["job_parent_resource_name"] = job_parent_resource_name
+        return ProjectJobRpcGet.model_validate(_data)
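A sketch of how a test could exercise the new side effect directly (mock and identifiers are illustrative; validate_call with arbitrary_types_allowed is assumed to accept the mock in place of the real RPC client):

    import asyncio
    from unittest.mock import MagicMock
    from uuid import UUID

    from pytest_simcore.helpers.webserver_rpc_server import WebserverRpcSideEffects


    async def _demo() -> None:
        side_effects = WebserverRpcSideEffects()  # defaults to schema example [0]
        got = await side_effects.get_project_marked_as_job(
            MagicMock(),  # stands in for RabbitMQRPCClient | MockType
            product_name="osparc",
            user_id=1,
            project_uuid=UUID("00000000-1234-5678-1234-123456789012"),
            job_parent_resource_name="studies/96642f2a-a72c-11ef-8776-02420a00087d",
        )
        # uuid and parent resource name are overridden on the returned example
        assert str(got.uuid) == "00000000-1234-5678-1234-123456789012"
        assert got.job_parent_resource_name.startswith("studies/")


    asyncio.run(_demo())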

packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py

Lines changed: 6 additions & 7 deletions
@@ -38,23 +38,22 @@ def get_async_engine(app: web.Application) -> AsyncEngine:
     return engine


-async def connect_to_db(app: web.Application, settings: PostgresSettings) -> None:
+async def connect_to_db(
+    app: web.Application, settings: PostgresSettings, application_name: str
+) -> None:
     """
     - db services up, data migrated and ready to use
     - sets an engine in app state (use `get_async_engine(app)` to retrieve)
     """
-    if settings.POSTGRES_CLIENT_NAME:
-        settings = settings.model_copy(
-            update={"POSTGRES_CLIENT_NAME": settings.POSTGRES_CLIENT_NAME + "-asyncpg"}
-        )
-
     with log_context(
         _logger,
         logging.INFO,
         "Connecting app[APP_DB_ASYNC_ENGINE_KEY] to postgres with %s",
         f"{settings=}",
     ):
-        engine = await create_async_engine_and_database_ready(settings)
+        engine = await create_async_engine_and_database_ready(
+            settings, application_name
+        )
         _set_async_engine_to_app_state(app, engine)

     _logger.info(
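Callers now pass the application name explicitly instead of relying on the old in-place POSTGRES_CLIENT_NAME + "-asyncpg" mutation of the settings object. A hedged usage sketch for an aiohttp service (settings factory assumed):

    from aiohttp import web

    from servicelib.aiohttp.db_asyncpg_engine import connect_to_db
    from settings_library.postgres import PostgresSettings


    async def _on_startup(app: web.Application) -> None:
        settings = PostgresSettings.create_from_envs()  # assumed factory
        await connect_to_db(app, settings, application_name="simcore-service-webserver")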
(file name hidden in the commit view)

Lines changed: 25 additions & 9 deletions

@@ -1,19 +1,35 @@
 import asyncio
 import logging
 from collections.abc import Sequence
-from typing import Any
+from typing import Any, Final

 from aiodocker import Docker, DockerError
 from aiodocker.execs import Exec
 from aiodocker.stream import Stream
+from common_library.errors_classes import OsparcErrorMixin
 from pydantic import NonNegativeFloat
-from starlette import status

-from ..core.errors import (
-    ContainerExecCommandFailedError,
-    ContainerExecContainerNotFoundError,
-    ContainerExecTimeoutError,
-)
+
+class BaseContainerUtilsError(OsparcErrorMixin, Exception):
+    pass
+
+
+class ContainerExecContainerNotFoundError(BaseContainerUtilsError):
+    msg_template = "Container '{container_name}' was not found"
+
+
+class ContainerExecTimeoutError(BaseContainerUtilsError):
+    msg_template = "Timed out after {timeout} while executing: '{command}'"
+
+
+class ContainerExecCommandFailedError(BaseContainerUtilsError):
+    msg_template = (
+        "Command '{command}' exited with code '{exit_code}'"
+        "and output: '{command_result}'"
+    )
+
+
+_HTTP_404_NOT_FOUND: Final[int] = 404

 _logger = logging.getLogger(__name__)

@@ -77,10 +93,10 @@ async def run_command_in_container(
             _execute_command(container_name, command), timeout
         )
     except DockerError as e:
-        if e.status == status.HTTP_404_NOT_FOUND:
+        if e.status == _HTTP_404_NOT_FOUND:
             raise ContainerExecContainerNotFoundError(
                 container_name=container_name
             ) from e
         raise
-    except asyncio.TimeoutError as e:
+    except TimeoutError as e:
         raise ContainerExecTimeoutError(timeout=timeout, command=command) from e
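Two details worth noting here: since Python 3.11, asyncio.TimeoutError is an alias of the builtin TimeoutError, so the switched except clause is equivalent and more idiomatic; and OsparcErrorMixin is assumed to build the message from msg_template plus the keyword arguments passed at raise time, which is what the inlined error classes rely on. A standalone sketch of the latter (class name illustrative):

    from common_library.errors_classes import OsparcErrorMixin


    class DemoTimeoutError(OsparcErrorMixin, Exception):
        msg_template = "Timed out after {timeout} while executing: '{command}'"


    try:
        raise DemoTimeoutError(timeout=5.0, command="ls -la")
    except DemoTimeoutError as err:
        print(err)  # Timed out after 5.0 while executing: 'ls -la'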

packages/service-library/src/servicelib/db_asyncpg_utils.py

Lines changed: 12 additions & 14 deletions
@@ -18,7 +18,7 @@

 @retry(**PostgresRetryPolicyUponInitialization(_logger).kwargs)
 async def create_async_engine_and_database_ready(
-    settings: PostgresSettings,
+    settings: PostgresSettings, application_name: str
 ) -> AsyncEngine:
     """
     - creates asyncio engine
@@ -31,15 +31,11 @@ async def create_async_engine_and_database_ready(
     )

     server_settings = {
-        "jit": "off"
-    }  # see https://docs.sqlalchemy.org/en/20/dialects/postgresql.html#disabling-the-postgresql-jit-to-improve-enum-datatype-handling
-    if settings.POSTGRES_CLIENT_NAME:
-        assert isinstance(settings.POSTGRES_CLIENT_NAME, str)  # nosec
-        server_settings.update(
-            {
-                "application_name": settings.POSTGRES_CLIENT_NAME,
-            }
-        )
+        "jit": "off",
+        "application_name": settings.client_name(
+            f"{application_name}", suffix="asyncpg"
+        ),
+    }

     engine = create_async_engine(
         settings.dsn_with_async_sqlalchemy,
@@ -75,7 +71,7 @@ async def check_postgres_liveness(engine: AsyncEngine) -> LivenessResult:

 @contextlib.asynccontextmanager
 async def with_async_pg_engine(
-    settings: PostgresSettings,
+    settings: PostgresSettings, *, application_name: str
 ) -> AsyncIterator[AsyncEngine]:
     """
     Creates an asyncpg engine and ensures it is properly closed after use.
@@ -86,9 +82,11 @@ async def with_async_pg_engine(
         logging.DEBUG,
         f"connection to db {settings.dsn_with_async_sqlalchemy}",
     ):
-        server_settings = None
-        if settings.POSTGRES_CLIENT_NAME:
-            assert isinstance(settings.POSTGRES_CLIENT_NAME, str)
+        server_settings = {
+            "application_name": settings.client_name(
+                application_name, suffix="asyncpg"
+            ),
+        }

         engine = create_async_engine(
             settings.dsn_with_async_sqlalchemy,
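With this refactor every asyncpg engine advertises a client name derived from the caller-supplied application_name plus an asyncpg suffix, which is what shows up in pg_stat_activity. A hedged usage sketch (settings factory assumed):

    from servicelib.db_asyncpg_utils import with_async_pg_engine
    from settings_library.postgres import PostgresSettings


    async def _demo() -> None:
        settings = PostgresSettings.create_from_envs()  # assumed factory
        async with with_async_pg_engine(
            settings, application_name="my-maintenance-script"
        ) as engine:
            async with engine.connect() as conn:
                ...  # sessions report the suffixed application_name to postgres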

packages/service-library/src/servicelib/docker_utils.py

Lines changed: 2 additions & 0 deletions
@@ -22,6 +22,7 @@
 )
 from settings_library.docker_registry import RegistrySettings
 from tenacity import (
+    before_sleep_log,
     retry,
     retry_if_exception_type,
     stop_after_attempt,
@@ -275,6 +276,7 @@ def _reset_progress_from_previous_attempt() -> None:
         stop=stop_after_attempt(retry_upon_error_count),
         reraise=True,
         retry=retry_if_exception_type(asyncio.TimeoutError),
+        before_sleep=before_sleep_log(_logger, logging.WARNING),
     )
     async def _pull_image_with_retry() -> None:
         nonlocal attempt
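before_sleep_log makes otherwise-silent retries visible by logging at the given level before each backoff sleep. A self-contained sketch of the same tenacity pattern:

    import asyncio
    import logging

    from tenacity import (
        before_sleep_log,
        retry,
        retry_if_exception_type,
        stop_after_attempt,
        wait_fixed,
    )

    logging.basicConfig(level=logging.WARNING)
    _logger = logging.getLogger(__name__)


    @retry(
        stop=stop_after_attempt(3),
        wait=wait_fixed(0.1),
        reraise=True,
        retry=retry_if_exception_type(asyncio.TimeoutError),
        before_sleep=before_sleep_log(_logger, logging.WARNING),
    )
    async def _flaky() -> None:
        raise asyncio.TimeoutError


    # asyncio.run(_flaky())  # logs a WARNING before each retry sleep, then re-raises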

packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py

Lines changed: 6 additions & 2 deletions
@@ -14,7 +14,9 @@
 _logger = logging.getLogger(__name__)


-async def connect_to_db(app: FastAPI, settings: PostgresSettings) -> None:
+async def connect_to_db(
+    app: FastAPI, settings: PostgresSettings, application_name: str
+) -> None:
     warnings.warn(
         "The 'connect_to_db' function is deprecated and will be removed in a future release. "
         "Please use 'postgres_lifespan' instead for managing the database connection lifecycle.",
@@ -27,7 +29,9 @@ async def connect_to_db(app: FastAPI, settings: PostgresSettings) -> None:
         logging.DEBUG,
         f"Connecting and migraging {settings.dsn_with_async_sqlalchemy}",
     ):
-        engine = await create_async_engine_and_database_ready(settings)
+        engine = await create_async_engine_and_database_ready(
+            settings, application_name
+        )

         app.state.engine = engine
         _logger.debug(
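The deprecated FastAPI helper gains the same explicit application_name; per its own warning, new code should move to postgres_lifespan. A minimal sketch of the transitional call (settings factory and service name assumed):

    from fastapi import FastAPI

    from servicelib.fastapi.db_asyncpg_engine import connect_to_db
    from settings_library.postgres import PostgresSettings


    async def _startup(app: FastAPI) -> None:
        settings = PostgresSettings.create_from_envs()  # assumed factory
        await connect_to_db(app, settings, application_name="api-server")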

0 commit comments