Skip to content

Commit 73c8c92

Browse files
fix
1 parent 1c4df32 commit 73c8c92

File tree

4 files changed

+45
-46
lines changed

4 files changed

+45
-46
lines changed

packages/postgres-database/src/simcore_postgres_database/utils_comp_run_snapshot_tasks.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -56,4 +56,5 @@ async def update_for_run_id_and_node_id(
5656
row = await result.one_or_none()
5757
if row is None:
5858
raise ValueError("improve message")
59+
# return None
5960
return row

packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py

Lines changed: 22 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
from pydantic import TypeAdapter
88
from servicelib.db_asyncpg_utils import create_async_engine_and_database_ready
99
from settings_library.node_ports import NodePortsSettings
10-
from simcore_postgres_database.models.comp_tasks import comp_tasks
10+
from simcore_postgres_database.models.comp_tasks import NodeClass, comp_tasks
1111
from simcore_postgres_database.models.projects import projects
1212
from simcore_postgres_database.utils_comp_run_snapshot_tasks import (
1313
update_for_run_id_and_node_id,
@@ -111,24 +111,28 @@ async def write_ports_configuration(
111111
run_hash=node_configuration.get("run_hash"),
112112
)
113113
)
114-
# 2. Get latest run id for the project
115-
_latest_run_id = await get_latest_run_id_for_project(
116-
engine, connection, project_id=project_id
117-
)
118114

119-
# 3. Update comp_run_snapshot_tasks table
120-
await update_for_run_id_and_node_id(
121-
engine,
122-
connection,
123-
run_id=_latest_run_id,
124-
node_id=node_uuid,
125-
data={
126-
"schema": node_configuration["schema"],
127-
"inputs": node_configuration["inputs"],
128-
"outputs": node_configuration["outputs"],
129-
"run_hash": node_configuration.get("run_hash"),
130-
},
131-
)
115+
# 2. Update comp_run_snapshot_tasks table only if the node is computational
116+
node = await _get_node_from_db(project_id, node_uuid, connection)
117+
if node.node_class == NodeClass.COMPUTATIONAL.value:
118+
# 2.1 Get latest run id for the project
119+
_latest_run_id = await get_latest_run_id_for_project(
120+
engine, connection, project_id=project_id
121+
)
122+
123+
# 2.2 Update comp_run_snapshot_tasks table
124+
await update_for_run_id_and_node_id(
125+
engine,
126+
connection,
127+
run_id=_latest_run_id,
128+
node_id=node_uuid,
129+
data={
130+
"schema": node_configuration["schema"],
131+
"inputs": node_configuration["inputs"],
132+
"outputs": node_configuration["outputs"],
133+
"run_hash": node_configuration.get("run_hash"),
134+
},
135+
)
132136

133137
async def get_ports_configuration_from_node_uuid(
134138
self, project_id: str, node_uuid: str

services/director-v2/src/simcore_service_director_v2/models/comp_run_snapshot_tasks.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
1+
from models_library.resource_tracker import HardwareInfo
12
from pydantic import ConfigDict, PositiveInt
23

3-
from .comp_tasks import BaseCompTaskAtDB, HardwareInfo, Image
4+
from .comp_tasks import BaseCompTaskAtDB, Image
45

56

67
class CompRunSnapshotTaskAtDBGet(BaseCompTaskAtDB):
Lines changed: 20 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,11 @@
11
import logging
22

3-
from models_library.projects_nodes_io import NodeID
4-
from pydantic import PositiveInt
53
from simcore_postgres_database.utils_comp_run_snapshot_tasks import (
64
COMP_RUN_SNAPSHOT_TASKS_DB_COLS,
7-
update_for_run_id_and_node_id,
85
)
96
from simcore_postgres_database.utils_repos import (
107
transaction_context,
118
)
12-
from simcore_service_director_v2.models.comp_run_snapshot_tasks import (
13-
CompRunSnapshotTaskAtDBGet,
14-
)
15-
from sqlalchemy.ext.asyncio import AsyncConnection
169

1710
from ..tables import comp_run_snapshot_tasks
1811
from ._base import BaseRepository
@@ -24,18 +17,18 @@ class CompRunsSnapshotTasksRepository(BaseRepository):
2417

2518
async def batch_create(
2619
self, *, data: list[dict]
27-
) -> list[CompRunSnapshotTaskAtDBGet]:
20+
) -> None: # list[CompRunSnapshotTaskAtDBGet]:
2821
async with transaction_context(self.db_engine) as conn:
2922

3023
try:
31-
result = await conn.execute(
24+
await conn.execute(
3225
comp_run_snapshot_tasks.insert().returning(
3326
*COMP_RUN_SNAPSHOT_TASKS_DB_COLS
3427
),
3528
data,
3629
)
37-
rows = result.fetchall()
38-
return [CompRunSnapshotTaskAtDBGet.model_validate(row) for row in rows]
30+
# rows = result.fetchall()
31+
# return [CompRunSnapshotTaskAtDBGet.model_validate(row) for row in rows]
3932
except Exception as e:
4033
logger.error(
4134
"Failed to batch create comp run snapshot tasks: %s",
@@ -44,19 +37,19 @@ async def batch_create(
4437
)
4538
raise
4639

47-
async def update_for_run_id_and_node_id(
48-
self,
49-
connection: AsyncConnection | None = None,
50-
*,
51-
run_id: PositiveInt,
52-
node_id: NodeID,
53-
data: dict,
54-
) -> CompRunSnapshotTaskAtDBGet:
55-
row = await update_for_run_id_and_node_id(
56-
self.db_engine,
57-
conn=connection,
58-
run_id=run_id,
59-
node_id=f"{node_id}",
60-
data=data,
61-
)
62-
return CompRunSnapshotTaskAtDBGet.model_validate(row)
40+
# async def update_for_run_id_and_node_id(
41+
# self,
42+
# connection: AsyncConnection | None = None,
43+
# *,
44+
# run_id: PositiveInt,
45+
# node_id: NodeID,
46+
# data: dict,
47+
# ) -> CompRunSnapshotTaskAtDBGet:
48+
# row = await update_for_run_id_and_node_id(
49+
# self.db_engine,
50+
# conn=connection,
51+
# run_id=run_id,
52+
# node_id=f"{node_id}",
53+
# data=data,
54+
# )
55+
# return CompRunSnapshotTaskAtDBGet.model_validate(row)

Comments (0)