
Commit c46b882

Merge branch 'master' into enh/retry-logout
2 parents: a2d2069 + 1032d56

10 files changed: +171 −66 lines changed

services/director-v2/src/simcore_service_director_v2/api/routes/computations.py
Lines changed: 3 additions & 1 deletion

@@ -37,6 +37,7 @@
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import AnyHttpUrl, parse_obj_as
 from servicelib.async_utils import run_sequentially_in_context
+from servicelib.logging_utils import log_decorator
 from servicelib.rabbitmq import RabbitMQRPCClient
 from simcore_postgres_database.utils_projects_metadata import DBProjectNotFoundError
 from starlette import status
@@ -150,6 +151,7 @@ async def _check_pipeline_startable(
 _UNKNOWN_NODE: Final[str] = "unknown node"
 
 
+@log_decorator(_logger)
 async def _get_project_metadata(
     project_id: ProjectID,
     project_repo: ProjectsRepository,
@@ -160,7 +162,7 @@ async def _get_project_metadata(
         project_id
     )
     if project_ancestors.parent_project_uuid is None:
-        # no parents here
+        _logger.debug("no parent found for project %s", project_id)
        return {}

    assert project_ancestors.parent_node_id is not None  # nosec
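
For readers unfamiliar with the new import: `servicelib.logging_utils.log_decorator` wraps a coroutine so that its calls are logged against the given logger. The real implementation lives in servicelib; a minimal sketch of the idea (assumed signature and behavior, not the actual code) could look like this:

import functools
import logging


def log_decorator(logger: logging.Logger):
    # Illustration only: the real servicelib.logging_utils.log_decorator
    # may differ in signature and in what it logs.
    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            logger.debug("calling %s args=%s kwargs=%s", func.__name__, args, kwargs)
            result = await func(*args, **kwargs)
            logger.debug("%s returned %s", func.__name__, result)
            return result

        return wrapper

    return decorator

Applied as `@log_decorator(_logger)` above, each call to `_get_project_metadata` would then emit debug entries on entry and return.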

services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py
Lines changed: 8 additions & 8 deletions

@@ -50,7 +50,7 @@ async def get(
         )
         row: RowProxy | None = await result.first()
         if not row:
-            raise ComputationalRunNotFoundError()
+            raise ComputationalRunNotFoundError
         return CompRunsAtDB.from_orm(row)
 
     async def list(
@@ -80,7 +80,7 @@ async def create(
         project_id: ProjectID,
         cluster_id: ClusterID,
         iteration: PositiveInt | None = None,
-        metadata: RunMetadataDict | None,
+        metadata: RunMetadataDict,
         use_on_demand_clusters: bool,
     ) -> CompRunsAtDB:
         try:
@@ -102,13 +102,13 @@
                 .values(
                     user_id=user_id,
                     project_uuid=f"{project_id}",
-                    cluster_id=cluster_id
-                    if cluster_id != DEFAULT_CLUSTER_ID
-                    else None,
+                    cluster_id=(
+                        cluster_id if cluster_id != DEFAULT_CLUSTER_ID else None
+                    ),
                     iteration=iteration,
                     result=RUNNING_STATE_TO_DB[RunningState.PUBLISHED],
-                    started=datetime.datetime.now(tz=datetime.timezone.utc),
-                    metadata=jsonable_encoder(metadata) if metadata else None,
+                    started=datetime.datetime.now(tz=datetime.UTC),
+                    metadata=jsonable_encoder(metadata),
                     use_on_demand_clusters=use_on_demand_clusters,
                 )
                 .returning(literal_column("*"))
@@ -146,7 +146,7 @@ async def set_run_result(
     ) -> CompRunsAtDB | None:
         values: dict[str, Any] = {"result": RUNNING_STATE_TO_DB[result_state]}
         if final_state:
-            values.update({"ended": datetime.datetime.now(tz=datetime.timezone.utc)})
+            values.update({"ended": datetime.datetime.now(tz=datetime.UTC)})
        return await self.update(
            user_id,
            project_id,
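
The `timezone.utc` → `UTC` changes are purely cosmetic: `datetime.UTC`, added in Python 3.11, is an alias of `datetime.timezone.utc`, so the stored timestamps are identical. A quick check:

import datetime

# datetime.UTC (Python 3.11+) is the same object as datetime.timezone.utc
assert datetime.UTC is datetime.timezone.utc

now = datetime.datetime.now(tz=datetime.UTC)
print(now.tzinfo)  # UTC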

services/director-v2/tests/unit/with_dbs/conftest.py
Lines changed: 54 additions & 23 deletions

@@ -8,15 +8,16 @@
 import datetime
 import json
 from collections.abc import Awaitable, Callable, Iterator
-from typing import Any
+from typing import Any, cast
 from uuid import uuid4
 
 import pytest
 import sqlalchemy as sa
 from _helpers import PublishedProject, RunningProject
 from faker import Faker
+from fastapi.encoders import jsonable_encoder
 from models_library.clusters import Cluster
-from models_library.projects import ProjectAtDB
+from models_library.projects import ProjectAtDB, ProjectID
 from models_library.projects_nodes_io import NodeID
 from pydantic.main import BaseModel
 from simcore_postgres_database.models.cluster_to_groups import cluster_to_groups
@@ -25,7 +26,11 @@
 from simcore_postgres_database.models.comp_runs import comp_runs
 from simcore_postgres_database.models.comp_tasks import comp_tasks
 from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB
-from simcore_service_director_v2.models.comp_runs import CompRunsAtDB, RunMetadataDict
+from simcore_service_director_v2.models.comp_runs import (
+    CompRunsAtDB,
+    ProjectMetadataDict,
+    RunMetadataDict,
+)
 from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB, Image
 from simcore_service_director_v2.utils.computations import to_node_class
 from simcore_service_director_v2.utils.dask import generate_dask_job_id
@@ -84,28 +89,36 @@ def creator(
             "project_id": f"{project.uuid}",
             "node_id": f"{node_id}",
             "schema": {"inputs": {}, "outputs": {}},
-            "inputs": {
-                key: json.loads(value.json(by_alias=True, exclude_unset=True))
-                if isinstance(value, BaseModel)
-                else value
-                for key, value in node_data.inputs.items()
-            }
-            if node_data.inputs
-            else {},
-            "outputs": {
-                key: json.loads(value.json(by_alias=True, exclude_unset=True))
-                if isinstance(value, BaseModel)
-                else value
-                for key, value in node_data.outputs.items()
-            }
-            if node_data.outputs
-            else {},
+            "inputs": (
+                {
+                    key: (
+                        json.loads(value.json(by_alias=True, exclude_unset=True))
+                        if isinstance(value, BaseModel)
+                        else value
+                    )
+                    for key, value in node_data.inputs.items()
+                }
+                if node_data.inputs
+                else {}
+            ),
+            "outputs": (
+                {
+                    key: (
+                        json.loads(value.json(by_alias=True, exclude_unset=True))
+                        if isinstance(value, BaseModel)
+                        else value
+                    )
+                    for key, value in node_data.outputs.items()
+                }
+                if node_data.outputs
+                else {}
+            ),
             "image": Image(name=node_data.key, tag=node_data.version).dict(  # type: ignore
                 by_alias=True, exclude_unset=True
             ),  # type: ignore
             "node_class": to_node_class(node_data.key),
             "internal_id": internal_id + 1,
-            "submit": datetime.datetime.now(tz=datetime.timezone.utc),
+            "submit": datetime.datetime.now(tz=datetime.UTC),
             "job_id": generate_dask_job_id(
                 service_key=node_data.key,
                 service_version=node_data.version,
@@ -135,9 +148,26 @@ def creator(
     )
 
 
+@pytest.fixture
+def project_metadata(faker: Faker) -> ProjectMetadataDict:
+    return ProjectMetadataDict(
+        parent_node_id=cast(NodeID, faker.uuid4(cast_to=None)),
+        parent_node_name=faker.pystr(),
+        parent_project_id=cast(ProjectID, faker.uuid4(cast_to=None)),
+        parent_project_name=faker.pystr(),
+        root_parent_project_id=cast(ProjectID, faker.uuid4(cast_to=None)),
+        root_parent_project_name=faker.pystr(),
+        root_parent_node_id=cast(NodeID, faker.uuid4(cast_to=None)),
+        root_parent_node_name=faker.pystr(),
+    )
+
+
 @pytest.fixture
 def run_metadata(
-    osparc_product_name: str, simcore_user_agent: str, faker: Faker
+    osparc_product_name: str,
+    simcore_user_agent: str,
+    project_metadata: ProjectMetadataDict,
+    faker: Faker,
 ) -> RunMetadataDict:
     return RunMetadataDict(
         node_id_names_map={},
@@ -147,6 +177,7 @@ def run_metadata(
         user_email=faker.email(),
         wallet_id=faker.pyint(min_value=1),
         wallet_name=faker.name(),
+        project_metadata=project_metadata,
     )
 
 
@@ -171,7 +202,7 @@ def creator(
     with postgres_db.connect() as conn:
         result = conn.execute(
             comp_runs.insert()
-            .values(**run_config)
+            .values(**jsonable_encoder(run_config))
             .returning(sa.literal_column("*"))
         )
         new_run = CompRunsAtDB.from_orm(result.first())
@@ -298,7 +329,7 @@ async def running_project(
             project=created_project,
             state=StateType.RUNNING,
             progress=0.0,
-            start=datetime.datetime.now(tz=datetime.timezone.utc),
+            start=datetime.datetime.now(tz=datetime.UTC),
        ),
        runs=runs(user=user, project=created_project, result=StateType.RUNNING),
    )
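
The switch to `.values(**jsonable_encoder(run_config))` matters because `run_config` now carries the nested `metadata` dict, whose UUIDs and other non-primitive values the database driver cannot serialize into the JSON column as-is; `jsonable_encoder` converts them to JSON-friendly primitives first. A small, self-contained illustration of what the encoder does (the dict below is made up, not the actual fixture):

import datetime
import uuid

from fastapi.encoders import jsonable_encoder

# Hypothetical run_config-like dict with values a JSON column cannot take directly
run_config = {
    "project_uuid": uuid.uuid4(),
    "started": datetime.datetime.now(tz=datetime.UTC),
    "metadata": {"wallet_id": 1, "parent_project_id": uuid.uuid4()},
}

encoded = jsonable_encoder(run_config)
# UUIDs become strings and datetimes become ISO-8601 strings
print(encoded["project_uuid"], encoded["started"])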

services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py
Lines changed: 11 additions & 13 deletions

@@ -381,22 +381,17 @@ async def test_misconfigured_pipeline_is_not_scheduled(
     )
     run_entry = CompRunsAtDB.parse_obj(await result.first())
     assert run_entry.result == RunningState.ABORTED
+    assert run_entry.metadata == run_metadata
 
 
 async def _assert_start_pipeline(
-    aiopg_engine, published_project: PublishedProject, scheduler: BaseCompScheduler
+    aiopg_engine,
+    published_project: PublishedProject,
+    scheduler: BaseCompScheduler,
+    run_metadata: RunMetadataDict,
 ) -> list[CompTaskAtDB]:
     exp_published_tasks = deepcopy(published_project.tasks)
     assert published_project.project.prj_owner
-    run_metadata = RunMetadataDict(
-        node_id_names_map={},
-        project_name="",
-        product_name="",
-        simcore_user_agent="",
-        user_email="",
-        wallet_id=231,
-        wallet_name="",
-    )
     await scheduler.run_new_pipeline(
         user_id=published_project.project.prj_owner,
         project_id=published_project.project.uuid,
@@ -618,11 +613,12 @@ async def test_proper_pipeline_is_scheduled(  # noqa: PLR0915
     mocked_clean_task_output_and_log_files_if_invalid: None,
     instrumentation_rabbit_client_parser: mock.AsyncMock,
     resource_tracking_rabbit_client_parser: mock.AsyncMock,
+    run_metadata: RunMetadataDict,
 ):
     _mock_send_computation_tasks(published_project.tasks, mocked_dask_client)
 
     expected_published_tasks = await _assert_start_pipeline(
-        aiopg_engine, published_project, scheduler
+        aiopg_engine, published_project, scheduler, run_metadata
     )
 
     # -------------------------------------------------------------------------------
@@ -990,10 +986,11 @@ async def test_task_progress_triggers(
     published_project: PublishedProject,
     mocked_parse_output_data_fct: None,
     mocked_clean_task_output_and_log_files_if_invalid: None,
+    run_metadata: RunMetadataDict,
 ):
     _mock_send_computation_tasks(published_project.tasks, mocked_dask_client)
     expected_published_tasks = await _assert_start_pipeline(
-        aiopg_engine, published_project, scheduler
+        aiopg_engine, published_project, scheduler, run_metadata
     )
     # -------------------------------------------------------------------------------
     # 1. first run will move comp_tasks to PENDING so the worker can take them
@@ -1286,10 +1283,11 @@ async def test_running_pipeline_triggers_heartbeat(
     aiopg_engine: aiopg.sa.engine.Engine,
     published_project: PublishedProject,
     resource_tracking_rabbit_client_parser: mock.AsyncMock,
+    run_metadata: RunMetadataDict,
 ):
     _mock_send_computation_tasks(published_project.tasks, mocked_dask_client)
     expected_published_tasks = await _assert_start_pipeline(
-        aiopg_engine, published_project, scheduler
+        aiopg_engine, published_project, scheduler, run_metadata
    )
    # -------------------------------------------------------------------------------
    # 1. first run will move comp_tasks to PENDING so the worker can take them
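
These test changes drop the RunMetadataDict that was hard-coded inside `_assert_start_pipeline` and instead inject the shared `run_metadata` fixture from conftest.py, so the helper and the new `run_entry.metadata` assertion compare against exactly what was handed to the scheduler. A toy sketch of that fixture-injection pattern (names and values are illustrative, not taken from the real suite):

import pytest


@pytest.fixture
def run_metadata() -> dict:
    # stands in for the RunMetadataDict fixture defined in conftest.py
    return {"product_name": "osparc", "wallet_id": 1, "wallet_name": "main"}


def _assert_start_pipeline(run_metadata: dict) -> None:
    # the helper now receives the shared metadata instead of building its own copy
    assert run_metadata["wallet_id"] == 1


def test_pipeline_uses_shared_metadata(run_metadata: dict):
    _assert_start_pipeline(run_metadata)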

services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js
Lines changed: 18 additions & 3 deletions

@@ -93,15 +93,15 @@ qx.Class.define("osparc.auth.ui.LoginView", {
         createAccountBtn.setLabel(this.tr("Request Account"));
       }
       createAccountBtn.addListener("execute", () => {
-        createAccountBtn.setEnabled(false);
-        if (createAccountAction === "REGISTER") {
+        if (window.location.hostname === "tip.itis.swiss") {
+          this.__openTIPITISSWISSPhaseOutDialog();
+        } else if (createAccountAction === "REGISTER") {
           this.fireEvent("toRegister");
         } else if (createAccountAction === "REQUEST_ACCOUNT_FORM") {
           this.fireEvent("toRequestAccount");
         } else if (createAccountAction === "REQUEST_ACCOUNT_INSTRUCTIONS") {
           osparc.store.Support.openInvitationRequiredDialog();
         }
-        createAccountBtn.setEnabled(true);
       }, this);
       osparc.utils.Utils.setIdToWidget(createAccountBtn, "loginCreateAccountBtn");
@@ -162,6 +162,21 @@ qx.Class.define("osparc.auth.ui.LoginView", {
       }
     },
 
+    __openTIPITISSWISSPhaseOutDialog: function() {
+      const createAccountWindow = new osparc.ui.window.Dialog("Request Account").set({
+        maxWidth: 380
+      });
+      let message = "This version of the planning tool will be phased out soon and no longer accepts new users.";
+      message += "<br>";
+      const tipLiteLabel = osparc.utils.Utils.createHTMLLink("TIP.lite", "https://tip-lite.science/");
+      const tipLabel = osparc.utils.Utils.createHTMLLink("TIP", "https://tip.science/");
+      const hereLabel = osparc.utils.Utils.createHTMLLink("here", "https://itis.swiss/tools-and-systems/ti-planning/overview/");
+      message += `Please visit ${tipLiteLabel} or ${tipLabel} instead. See ${hereLabel} for more information.`;
+      createAccountWindow.setMessage(message);
+      createAccountWindow.center();
+      createAccountWindow.open();
+    },
+
     getEmail: function() {
      const email = this._form.getItems().email;
      return email.getValue();

services/static-webserver/client/source/class/osparc/utils/Utils.js
Lines changed: 1 addition & 1 deletion

@@ -94,7 +94,7 @@ qx.Class.define("osparc.utils.Utils", {
     replaceTokens: function(str, key, value) {
       // `str` might be a a localized string, get the string first
       str = str.toString ? str.toString() : str;
-      const regex = new RegExp("${"+key+"}", "g");
+      const regex = new RegExp("\\${"+key+"\\}", "g");
      return str.replace(regex, value);
    },
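
The Utils.js fix escapes the `$` (and the closing brace) so the pattern matches the literal placeholder `${key}` instead of treating `$` as an end-of-string anchor, which could never match mid-string. The same pitfall exists in Python's `re`, which makes for a compact illustration (the token and text below are made up):

import re

text = "Hello ${name}, welcome back"
key = "name"

# Unescaped: "$" is an end-of-string anchor, so the literal token is never matched
broken = re.compile("${" + key + "}")
print(broken.sub("Alice", text))  # Hello ${name}, welcome back  (unchanged)

# Escaped: r"\$\{name\}" matches the placeholder literally
fixed = re.compile(r"\$\{" + re.escape(key) + r"\}")
print(fixed.sub("Alice", text))  # Hello Alice, welcome back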

services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json
Lines changed: 2 additions & 9 deletions

@@ -21,7 +21,7 @@
         "selector": "osparc-test-id=templatesTabBtn"
       },
       "anchorEl": "osparc-test-id=templatesTabBtn",
-      "text": "Clicking on a Tutorial will create a copy of that Study, which will appear in your own Project ts tab with thee same name as the Tutorial. Any changes you make to this copy will not affect the original Tutorial.",
+      "text": "Clicking on a Tutorial will create a copy of that Project, which will appear in your own Projects tab with the same name as the Tutorial. Any changes you make to this copy will not affect the original Tutorial.",
       "placement": "bottom"
     }, {
       "beforeClick": {
@@ -30,13 +30,6 @@
       "anchorEl": "osparc-test-id=servicesTabBtn",
       "text": "Every Project in Sim4Life is composed of at lease one so-called Service.<br>Services are building blocks for Studies and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.",
       "placement": "bottom"
-    }, {
-      "beforeClick": {
-        "selector": "osparc-test-id=dataTabBtn"
-      },
-      "anchorEl": "osparc-test-id=dataTabBtn",
-      "text": "The Data tab allow you to browse the output files of all of your active pipelines in one place. This is most useful for downloading the data created on the online platform.",
-      "placement": "bottom"
     }]
   },
   "navbar": {
@@ -50,7 +43,7 @@
       "event": "tap"
     },
     "anchorEl": "osparc-test-id=notificationsContainer",
-    "text": "By clicking on the Bell, you will you see notifications about what Studies, Credits and Organizations.",
+    "text": "By clicking on the Bell, you will you see notifications about which Projects, Credits and Organizations have been shared with you.",
    "placement": "bottom"
  }, {
    "beforeClick": {
