Skip to content

Commit a845c1f

Browse files
authored
Bugfix/allow removing outputs from services (#2063)
* correctly handle removal of files from outputs * ensure frontend removes deleted outputs
1 parent 91e9ab7 commit a845c1f

File tree

6 files changed

+43
-23
lines changed

6 files changed

+43
-23
lines changed

services/director-v2/src/simcore_service_director_v2/api/routes/computations.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@
3434
is_pipeline_stopped,
3535
)
3636
from ...utils.dags import create_dag_graph, create_minimal_graph_based_on_selection
37-
from ...utils.exceptions import ProjectNotFoundError
37+
from ...utils.exceptions import PipelineNotFoundError, ProjectNotFoundError
3838
from ..dependencies.celery import CeleryClient, get_celery_client
3939
from ..dependencies.database import get_repository
4040
from ..dependencies.director_v0 import DirectorV0Client, get_director_v0_client
@@ -228,7 +228,7 @@ async def get_computation(
228228
)
229229
return task_out
230230

231-
except ProjectNotFoundError as e:
231+
except (ProjectNotFoundError, PipelineNotFoundError) as e:
232232
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
233233

234234
# NOTE: this will be re-used for the prep2go API stuff... don't worry...

services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
from sqlalchemy.dialects.postgresql import insert
99

1010
from ....models.domains.comp_pipelines import CompPipelineAtDB
11+
from ....utils.exceptions import PipelineNotFoundError
1112
from ....utils.logging_utils import log_decorator
1213
from ..tables import comp_pipeline
1314
from ._base import BaseRepository
@@ -24,6 +25,8 @@ async def get_pipeline(self, project_id: ProjectID) -> CompPipelineAtDB:
2425
)
2526
)
2627
row: RowProxy = await result.fetchone()
28+
if not row:
29+
raise PipelineNotFoundError(str(project_id))
2730
return CompPipelineAtDB.from_orm(row)
2831

2932
@log_decorator(logger=logger)

services/director-v2/src/simcore_service_director_v2/utils/exceptions.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,14 @@ def __init__(self, msg: Optional[str] = None):
1111

1212

1313
class ProjectNotFoundError(DirectorException):
14-
"""Service was not found in swarm"""
14+
"""Project not found error"""
1515

1616
def __init__(self, project_id: ProjectID):
1717
super().__init__(f"project {project_id} not found")
18+
19+
20+
class PipelineNotFoundError(DirectorException):
21+
"""Pipeline not found error"""
22+
23+
def __init__(self, pipeline_id: str):
24+
super().__init__(f"pipeline {pipeline_id} not found")

services/web/client/source/class/osparc/data/model/Node.js

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -610,11 +610,15 @@ qx.Class.define("osparc.data.model.Node", {
610610

611611
setOutputData: function(outputs) {
612612
if (outputs) {
613-
for (const outputKey in outputs) {
613+
for (const outputKey in this.__outputs) {
614614
if (!Object.prototype.hasOwnProperty.call(this.__outputs, outputKey)) {
615615
this.__outputs[outputKey] = {};
616616
}
617-
this.__outputs[outputKey]["value"] = outputs[outputKey];
617+
if (Object.prototype.hasOwnProperty.call(outputs, outputKey)) {
618+
this.__outputs[outputKey]["value"] = outputs[outputKey];
619+
} else {
620+
this.__outputs[outputKey]["value"] = "";
621+
}
618622
this.fireDataEvent("outputChanged", outputKey);
619623
}
620624
}

services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import json
77
import logging
88
from pprint import pformat
9-
from typing import Dict, List
9+
from typing import Dict
1010

1111
from aiohttp import web
1212
from aiopg.sa import Engine
@@ -61,11 +61,8 @@ async def _update_project_outputs(
6161
node_uuid: NodeID,
6262
outputs: Dict,
6363
) -> None:
64-
changed_keys: List[str] = list(outputs.keys())
65-
if not changed_keys:
66-
return
67-
68-
project = await projects_api.update_project_node_outputs(
64+
# the new outputs might be {}, or {key_name: payload}
65+
project, changed_keys = await projects_api.update_project_node_outputs(
6966
app,
7067
user_id,
7168
project_uuid,

services/web/server/src/simcore_service_webserver/projects/projects_api.py

Lines changed: 21 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
import logging
1212
from collections import defaultdict
1313
from pprint import pformat
14-
from typing import Any, Dict, List, Optional, Set
14+
from typing import Any, Dict, List, Optional, Set, Tuple
1515
from uuid import uuid4
1616

1717
from aiohttp import web
@@ -344,7 +344,7 @@ async def update_project_node_outputs(
344344
project_id: str,
345345
node_id: str,
346346
data: Optional[Dict],
347-
) -> Dict:
347+
) -> Tuple[Dict, List[str]]:
348348
"""
349349
Updates outputs of a given node in a project with 'data'
350350
"""
@@ -363,25 +363,34 @@ async def update_project_node_outputs(
363363

364364
# NOTE: update outputs (not required) if necessary as the UI expects a
365365
# dataset/label field that is missing
366-
outputs: Dict[str, Any] = project["workbench"][node_id].setdefault("outputs", {})
367-
outputs.update(data)
368-
369-
for output_key in outputs.keys():
370-
if not isinstance(outputs[output_key], dict):
366+
current_outputs = project["workbench"][node_id].setdefault("outputs", {})
367+
new_outputs = data
368+
project["workbench"][node_id]["outputs"] = new_outputs
369+
370+
# find changed keys (the ones that appear or disappear for sure)
371+
changed_keys = list(current_outputs.keys() ^ new_outputs.keys())
372+
# now check the ones that are in both object
373+
for key in current_outputs.keys() & new_outputs.keys():
374+
if current_outputs[key] != new_outputs[key]:
375+
changed_keys.append(key)
376+
377+
# FIXME: this should be reviewed @maiz. how is an output file defined. I think we have several flavours.
378+
for output_key in new_outputs.keys():
379+
if not isinstance(new_outputs[output_key], dict):
371380
continue
372-
if "path" in outputs[output_key]:
381+
if "path" in new_outputs[output_key]:
373382
# file_id is of type study_id/node_id/file.ext
374-
file_id = outputs[output_key]["path"]
383+
file_id = new_outputs[output_key]["path"]
375384
study_id, _, file_ext = file_id.split("/")
376-
outputs[output_key]["dataset"] = study_id
377-
outputs[output_key]["label"] = file_ext
385+
new_outputs[output_key]["dataset"] = study_id
386+
new_outputs[output_key]["label"] = file_ext
378387

379388
db = app[APP_PROJECT_DBAPI]
380389
updated_project = await db.update_user_project(project, user_id, project_id)
381390
updated_project["state"] = await get_project_state_for_user(
382391
user_id, project_id, app
383392
)
384-
return updated_project
393+
return updated_project, changed_keys
385394

386395

387396
async def get_workbench_node_ids_from_project_uuid(

0 commit comments

Comments
 (0)