Skip to content

Commit d1a8b8f

Browse files
Merge branch 'master' into 8292-create-custom-GenerateJsonSchema-for-resolving-references
2 parents 9bd4260 + 278df26 commit d1a8b8f

File tree

44 files changed

+489
-251
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the searchbox below for content that may be hidden.

44 files changed

+489
-251
lines changed

.env-devel

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,7 @@ CELERY_RESULT_EXPIRES=P7D
5353

5454
CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}'
5555
CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG=master-github-latest
56+
CLUSTERS_KEEPER_DASK_NPROCS=1
5657
CLUSTERS_KEEPER_DASK_NTHREADS=0
5758
CLUSTERS_KEEPER_DASK_WORKER_SATURATION=inf
5859
CLUSTERS_KEEPER_EC2_ACCESS=null

packages/notifications-library/src/notifications_library/templates/on_account_requested.email.content.html

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,11 @@
44
<p>Dear Support team
55

66
<p>
7-
We have received the following request form for an account in {{ product.display_name }} from <b>{{ host }}</b>
7+
We have received the following request form for an account in:
8+
<ol>
9+
<li>Product: <b>{{ product.display_name }}</b></li>
10+
<li>Host: <b>{{ host }}</b></li>
11+
</ol>
812
</p>
913

1014
<pre>

packages/notifications-library/src/notifications_library/templates/on_account_requested.email.content.txt

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
Dear Support team,
22

3-
We have received the following request form for an account in {{ product.display_name }} from **{{ host }}**:
3+
We have received the following request form for an account in:
4+
- Product: **{{ product.display_name }}**
5+
- Host: **{{ host }}**
46

57
{{ dumps(request_form) }}
68

services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -427,10 +427,17 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
427427
),
428428
]
429429

430+
CLUSTERS_KEEPER_DASK_NPROCS: Annotated[
431+
int,
432+
Field(
433+
description="overrides the default number of worker processes in the dask-sidecars, setting it to negative values will use dask defaults (see description in 'dask worker --help')",
434+
),
435+
]
436+
430437
CLUSTERS_KEEPER_DASK_NTHREADS: Annotated[
431438
NonNegativeInt,
432439
Field(
433-
description="overrides the default number of threads in the dask-sidecars, setting it to 0 will use the default (see description in dask-sidecar)",
440+
description="overrides the default number of threads per process in the dask-sidecars, setting it to 0 will use the default (see description in dask-sidecar)",
434441
),
435442
]
436443

services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ services:
5656
- cluster
5757
environment:
5858
DASK_LOG_FORMAT_LOCAL_DEV_ENABLED: 1
59-
DASK_NPROCS: 1
59+
DASK_NPROCS: ${DASK_NPROCS}
6060
DASK_NTHREADS: ${DASK_NTHREADS}
6161
DASK_SCHEDULER_URL: tls://dask-scheduler:8786
6262
DASK_SIDECAR_NON_USABLE_RAM: 0

services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,8 @@ def _convert_to_env_dict(entries: dict[str, Any]) -> str:
8989
f"CLUSTERS_KEEPER_EC2_ENDPOINT={app_settings.CLUSTERS_KEEPER_EC2_ACCESS.EC2_ENDPOINT or 'null'}",
9090
f"CLUSTERS_KEEPER_EC2_REGION_NAME={app_settings.CLUSTERS_KEEPER_EC2_ACCESS.EC2_REGION_NAME}",
9191
f"CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY={app_settings.CLUSTERS_KEEPER_EC2_ACCESS.EC2_SECRET_ACCESS_KEY}",
92-
f"DASK_NTHREADS={app_settings.CLUSTERS_KEEPER_DASK_NTHREADS or ''}",
92+
f"DASK_NPROCS={app_settings.CLUSTERS_KEEPER_DASK_NPROCS}",
93+
f"DASK_NTHREADS={app_settings.CLUSTERS_KEEPER_DASK_NTHREADS}",
9394
f"DASK_TLS_CA_FILE={_HOST_TLS_CA_FILE_PATH}",
9495
f"DASK_TLS_CERT={_HOST_TLS_CERT_FILE_PATH}",
9596
f"DASK_TLS_KEY={_HOST_TLS_KEY_FILE_PATH}",

services/clusters-keeper/tests/unit/conftest.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -130,6 +130,7 @@ def app_environment(
130130
"CLUSTERS_KEEPER_SSM_SECRET_ACCESS_KEY": faker.pystr(),
131131
"CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES": "{}",
132132
"CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX": faker.pystr(),
133+
"CLUSTERS_KEEPER_DASK_NPROCS": f"{faker.pyint()}",
133134
"CLUSTERS_KEEPER_DASK_NTHREADS": f"{faker.pyint(min_value=0)}",
134135
"CLUSTERS_KEEPER_DASK_WORKER_SATURATION": f"{faker.pyfloat(min_value=0.1)}",
135136
"CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": "{}",

services/dask-sidecar/docker/boot.sh

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -171,14 +171,14 @@ else
171171
# 'daemonic processes are not allowed to have children' arises when running the sidecar.cli
172172
# because multi-processing library is used by the sidecar and the nanny does not like it
173173
# setting --no-nanny fixes this: see https://github.com/dask/distributed/issues/2142
174-
print_info "Starting as a dask worker "${DASK_WORKER_VERSION}" -> "${DASK_SCHEDULER_URL}" ..."
175-
print_info "Worker resources set as: "$resources""
174+
print_info "Starting as a dask worker ${DASK_WORKER_VERSION} -> ${DASK_SCHEDULER_URL} ..."
175+
print_info "Worker resources set as: $resources"
176176
if [ "${SC_BOOT_MODE}" = "debug" ]; then
177177
exec watchmedo auto-restart --recursive --pattern="*.py;*/src/*" --ignore-patterns="*test*;pytest_simcore/*;setup.py;*ignore*" --ignore-directories -- \
178178
dask worker "${DASK_SCHEDULER_URL}" \
179179
--local-directory /tmp/dask-sidecar \
180180
--preload simcore_service_dask_sidecar.worker \
181-
--nworkers ${DASK_NPROCS} \
181+
--nworkers "${DASK_NPROCS}" \
182182
--nthreads "${DASK_NTHREADS}" \
183183
--dashboard-address 8787 \
184184
--memory-limit "${DASK_MEMORY_LIMIT}" \
@@ -188,7 +188,7 @@ else
188188
exec dask worker "${DASK_SCHEDULER_URL}" \
189189
--local-directory /tmp/dask-sidecar \
190190
--preload simcore_service_dask_sidecar.worker \
191-
--nworkers ${DASK_NPROCS} \
191+
--nworkers "${DASK_NPROCS}" \
192192
--nthreads "${DASK_NTHREADS}" \
193193
--dashboard-address 8787 \
194194
--memory-limit "${DASK_MEMORY_LIMIT}" \

services/docker-compose.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -221,6 +221,7 @@ services:
221221
<<: *tracing_open_telemetry_environs
222222
CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG: ${CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG}
223223
CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: ${CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH}
224+
CLUSTERS_KEEPER_DASK_NPROCS: ${CLUSTERS_KEEPER_DASK_NPROCS}
224225
CLUSTERS_KEEPER_DASK_NTHREADS: ${CLUSTERS_KEEPER_DASK_NTHREADS}
225226
CLUSTERS_KEEPER_DASK_WORKER_SATURATION: ${CLUSTERS_KEEPER_DASK_WORKER_SATURATION}
226227
CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION: ${CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION}

services/static-webserver/client/source/class/osparc/dashboard/CardBase.js

Lines changed: 19 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,7 @@ qx.Class.define("osparc.dashboard.CardBase", {
5252
statics: {
5353
SHARE_ICON: "@FontAwesome5Solid/share-alt/13",
5454
SHARED_USER: "@FontAwesome5Solid/user/13",
55+
SHARED_SUPPORT: "@FontAwesome5Solid/question/13",
5556
SHARED_ORGS: "@FontAwesome5Solid/users/13",
5657
SHARED_ALL: "@FontAwesome5Solid/globe/13",
5758
PERM_READ: "@FontAwesome5Solid/eye/13",
@@ -188,17 +189,25 @@ qx.Class.define("osparc.dashboard.CardBase", {
188189
// Icon
189190
const groupsStore = osparc.store.Groups.getInstance();
190191
const everyoneGroupIds = groupsStore.getEveryoneGroupIds();
192+
const supportGroup = groupsStore.getSupportGroup();
191193
const organizations = groupsStore.getOrganizations();
192194
const myGroupId = groupsStore.getMyGroupId();
193195

194196
const organizationIds = Object.keys(organizations).map(key => parseInt(key));
195197
if (gids.some(gid => everyoneGroupIds.includes(gid))) {
198+
// shared with "1" or product everyone
196199
shareIcon.setSource(osparc.dashboard.CardBase.SHARED_ALL);
200+
} else if (supportGroup && gids.includes(supportGroup.getGroupId())) {
201+
// shared with support group, show as if it was a group
202+
shareIcon.setSource(osparc.dashboard.CardBase.SHARED_ORGS);
197203
} else if (organizationIds.filter(value => gids.includes(value)).length) { // find intersection
204+
// shared with at least one organization
198205
shareIcon.setSource(osparc.dashboard.CardBase.SHARED_ORGS);
199206
} else if (gids.length === 1 && gids[0] === myGroupId) {
207+
// not shared
200208
shareIcon.setSource(osparc.dashboard.CardBase.SHARE_ICON);
201209
} else {
210+
// shared with some users
202211
shareIcon.setSource(osparc.dashboard.CardBase.SHARED_USER);
203212
}
204213

@@ -227,10 +236,14 @@ qx.Class.define("osparc.dashboard.CardBase", {
227236
addHintFromGids: function(icon, gids) {
228237
const groupsStore = osparc.store.Groups.getInstance();
229238
const everyoneGroups = groupsStore.getEveryoneGroups();
239+
const supportGroup = groupsStore.getSupportGroup();
230240
const organizations = groupsStore.getOrganizations();
231241
const myGroupId = groupsStore.getMyGroupId();
232242

233243
const groups = everyoneGroups.slice();
244+
if (supportGroup) {
245+
groups.push(supportGroup);
246+
}
234247
groups.push(...Object.values(organizations));
235248
const sharedGrps = [];
236249
groups.forEach(group => {
@@ -267,10 +280,14 @@ qx.Class.define("osparc.dashboard.CardBase", {
267280
sharedGrpLabels.push("...");
268281
break;
269282
}
270-
let sharedGrpLabel = sharedGrps[i].getLabel();
271-
if (everyoneGroups.includes(sharedGrps[i])) {
283+
const sharedGroup = sharedGrps[i];
284+
let sharedGrpLabel = sharedGroup.getLabel();
285+
if (everyoneGroups.includes(sharedGroup)) {
272286
sharedGrpLabel = "Public";
273287
}
288+
if (supportGroup && supportGroup.getGroupId() === sharedGroup.getGroupId()) {
289+
sharedGrpLabel = supportGroup.getLabel();
290+
}
274291
if (!sharedGrpLabels.includes(sharedGrpLabel)) {
275292
sharedGrpLabels.push(sharedGrpLabel);
276293
}

0 commit comments

Comments (0)