
Commit cba460c

Merge branch 'master' into introduce-vip-models-pricing-3-part
2 parents: 1bc8052 + bf06671

78 files changed (+1111, -754 lines)


.env-devel

Lines changed: 15 additions & 15 deletions
@@ -17,12 +17,12 @@ AGENT_VOLUMES_CLEANUP_S3_ENDPOINT=http://172.17.0.1:9001
 AGENT_VOLUMES_CLEANUP_S3_PROVIDER=MINIO
 AGENT_VOLUMES_CLEANUP_S3_REGION=us-east-1
 AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678
-AGENT_TRACING={}
+AGENT_TRACING=null

 API_SERVER_DEV_FEATURES_ENABLED=0
 API_SERVER_LOGLEVEL=INFO
 API_SERVER_PROFILING=1
-API_SERVER_TRACING={}
+API_SERVER_TRACING=null
 TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT=25

 AUTOSCALING_DASK=null
@@ -35,7 +35,7 @@ AUTOSCALING_LOGLEVEL=INFO
 AUTOSCALING_NODES_MONITORING=null
 AUTOSCALING_POLL_INTERVAL="00:00:10"
 AUTOSCALING_SSM_ACCESS=null
-AUTOSCALING_TRACING={}
+AUTOSCALING_TRACING=null

 AWS_S3_CLI_S3=null

@@ -47,7 +47,7 @@ CATALOG_PORT=8000
 CATALOG_PROFILING=1
 CATALOG_SERVICES_DEFAULT_RESOURCES='{"CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": {"limit": 2147483648, "reservation": 2147483648}}'
 CATALOG_SERVICES_DEFAULT_SPECIFICATIONS='{}'
-CATALOG_TRACING={}
+CATALOG_TRACING=null

 CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}'
 CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG=master-github-latest
@@ -61,7 +61,7 @@ CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION=5
 CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES=null
 CLUSTERS_KEEPER_TASK_INTERVAL=00:00:30
 CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES=null
-CLUSTERS_KEEPER_TRACING={}
+CLUSTERS_KEEPER_TRACING=null

 DASK_SCHEDULER_HOST=dask-scheduler
 DASK_SCHEDULER_PORT=8786
@@ -81,7 +81,7 @@ DIRECTOR_PUBLISHED_HOST_NAME="127.0.0.1:9081"
 DIRECTOR_REGISTRY_CACHING_TTL=00:15:00
 DIRECTOR_REGISTRY_CACHING=True
 DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=null
-DIRECTOR_TRACING={}
+DIRECTOR_TRACING=null

 EFS_USER_ID=8006
 EFS_USER_NAME=efs
@@ -90,11 +90,11 @@ EFS_GROUP_NAME=efs-group
 EFS_DNS_NAME=fs-xxx.efs.us-east-1.amazonaws.com
 EFS_MOUNTED_PATH=/tmp/efs
 EFS_PROJECT_SPECIFIC_DATA_DIRECTORY=project-specific-data
-EFS_GUARDIAN_TRACING={}
+EFS_GUARDIAN_TRACING=null
 EFS_DEFAULT_USER_SERVICE_SIZE_BYTES=10000

 # DATCORE_ADAPTER
-DATCORE_ADAPTER_TRACING={}
+DATCORE_ADAPTER_TRACING=null

 # DIRECTOR_V2 ----
 COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}'
@@ -121,14 +121,14 @@ DYNAMIC_SIDECAR_LOG_LEVEL=DEBUG
 DYNAMIC_SIDECAR_PROMETHEUS_MONITORING_NETWORKS=[]
 DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS={}
 DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT=01:00:00
-DIRECTOR_V2_TRACING={}
+DIRECTOR_V2_TRACING=null

 # DYNAMIC_SCHEDULER ----
 DYNAMIC_SCHEDULER_LOGLEVEL=DEBUG
 DYNAMIC_SCHEDULER_PROFILING=1
 DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER=0
 DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT=01:00:00
-DYNAMIC_SCHEDULER_TRACING={}
+DYNAMIC_SCHEDULER_TRACING=null
 DYNAMIC_SCHEDULER_UI_STORAGE_SECRET=adminadmin

 FUNCTION_SERVICES_AUTHORS='{"UN": {"name": "Unknown", "email": "[email protected]", "affiliation": "unknown"}}'
@@ -143,7 +143,7 @@ INVITATIONS_PORT=8000
 INVITATIONS_SECRET_KEY='REPLACE_ME_with_result__Fernet_generate_key='
 INVITATIONS_SWAGGER_API_DOC_ENABLED=1
 INVITATIONS_USERNAME=admin
-INVITATIONS_TRACING={}
+INVITATIONS_TRACING=null

 LOG_FORMAT_LOCAL_DEV_ENABLED=1
 LOG_FILTER_MAPPING='{}'
@@ -168,7 +168,7 @@ PAYMENTS_STRIPE_API_SECRET='REPLACE_ME_with_api_secret'
 PAYMENTS_STRIPE_URL=https://api.stripe.com
 PAYMENTS_SWAGGER_API_DOC_ENABLED=1
 PAYMENTS_USERNAME=admin
-PAYMENTS_TRACING={}
+PAYMENTS_TRACING=null

 POSTGRES_DB=simcoredb
 POSTGRES_ENDPOINT=postgres:5432
@@ -209,7 +209,7 @@ RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED=1
 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL=6
 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC=300
 RESOURCE_USAGE_TRACKER_S3=null
-RESOURCE_USAGE_TRACKER_TRACING={}
+RESOURCE_USAGE_TRACKER_TRACING=null

 # NOTE: 172.17.0.1 is the docker0 interface, which redirect from inside a container onto the host network interface.
 R_CLONE_OPTION_BUFFER_SIZE=16M
@@ -243,7 +243,7 @@ STORAGE_HOST=storage
 STORAGE_LOGLEVEL=INFO
 STORAGE_PORT=8080
 STORAGE_PROFILING=1
-STORAGE_TRACING={}
+STORAGE_TRACING=null
 # STORAGE ----

 SWARM_STACK_NAME=master-simcore
@@ -389,6 +389,6 @@ WEBSERVER_SOCKETIO=1
 WEBSERVER_STATICWEB={}
 WEBSERVER_STUDIES_DISPATCHER={}
 WEBSERVER_TAGS=1
-WEBSERVER_TRACING={}
+WEBSERVER_TRACING=null
 WEBSERVER_USERS={}
 WEBSERVER_VERSION_CONTROL=1
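
Note on the {} -> null switch above: these *_TRACING variables are read into optional, JSON-decoded settings fields, so null disables tracing for a service while {} enables it with the default sub-settings. A minimal pydantic-settings sketch of that convention (class name and field defaults here are illustrative, not copied from the repo):

from pydantic import BaseModel
from pydantic_settings import BaseSettings


class TracingSettings(BaseModel):
    # illustrative fields only; the real tracing settings live in the settings library
    TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: str = "http://opentelemetry-collector"
    TRACING_OPENTELEMETRY_COLLECTOR_PORT: int = 4318


class AgentSettings(BaseSettings):
    # pydantic-settings JSON-decodes env values for non-string fields, so
    #   AGENT_TRACING=null -> None              (tracing disabled)
    #   AGENT_TRACING={}   -> TracingSettings() (all defaults)
    AGENT_TRACING: TracingSettings | None = None


settings = AgentSettings()  # reads AGENT_TRACING from the environment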

.github/workflows/ci-testing-deploy.yml

Lines changed: 8 additions & 2 deletions
@@ -2664,11 +2664,14 @@ jobs:

   system-api-specs:
     needs: [changes]
-    if: ${{ needs.changes.outputs.anything-py == 'true' && github.event_name == 'push' && github.event.pull_request != null }}
+    if: ${{ needs.changes.outputs.anything-py == 'true' || github.event_name == 'push' }}
     timeout-minutes: 10
     name: "[sys] check api-specs are up to date"
     runs-on: ubuntu-latest
     steps:
+      - name: Ensure job passes if not PR # ensure pass so upstream jobs which depend on this will run (dockerhub deployment)
+        if: ${{ github.event.pull_request == null }}
+        run: echo "::notice Passing job because not in PR"; exit 0
       - name: setup python environment
         uses: actions/setup-python@v5
         with:
@@ -2690,11 +2693,14 @@ jobs:

   system-backwards-compatibility:
     needs: [changes, system-api-specs]
-    if: ${{ needs.changes.outputs.anything-py == 'true' && github.event_name == 'push' && github.event.pull_request != null }}
+    if: ${{ needs.changes.outputs.anything-py == 'true' || github.event_name == 'push' }}
     timeout-minutes: 10
     name: "[sys] api-server backwards compatibility"
     runs-on: ubuntu-latest
     steps:
+      - name: Ensure job passes if not PR # ensure pass so upstream jobs which depend on this will run (dockerhub deployment)
+        if: ${{ github.event.pull_request == null }}
+        run: echo "::notice Passing job because not in PR"; exit 0
       - name: setup python environment
         uses: actions/setup-python@v5
         with:
Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
+from typing import Any
+
+
+class UnSet:
+    VALUE: "UnSet"
+
+
+UnSet.VALUE = UnSet()
+
+
+def as_dict_exclude_unset(**params) -> dict[str, Any]:
+    return {k: v for k, v in params.items() if not isinstance(v, UnSet)}
Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
+from typing import Any
+
+from common_library.unset import UnSet, as_dict_exclude_unset
+
+
+def test_as_dict_exclude_unset():
+    def f(
+        par1: str | UnSet = UnSet.VALUE, par2: int | UnSet = UnSet.VALUE
+    ) -> dict[str, Any]:
+        return as_dict_exclude_unset(par1=par1, par2=par2)
+
+    assert f() == {}
+    assert f(par1="hi") == {"par1": "hi"}
+    assert f(par2=4) == {"par2": 4}
+    assert f(par1="hi", par2=4) == {"par1": "hi", "par2": 4}
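
The two new modules above add an UnSet sentinel so callers can tell "argument not provided" apart from an explicit None. A short illustrative sketch of the pattern (the update_profile function below is hypothetical and not part of this commit):

from typing import Any

from common_library.unset import UnSet, as_dict_exclude_unset


def update_profile(
    *,
    nickname: str | UnSet = UnSet.VALUE,
    phone: str | None | UnSet = UnSet.VALUE,
) -> dict[str, Any]:
    # Only the keyword arguments the caller actually passed end up in the
    # payload; an explicit None survives, an omitted argument does not.
    return as_dict_exclude_unset(nickname=nickname, phone=phone)


assert update_profile() == {}
assert update_profile(phone=None) == {"phone": None}
assert update_profile(nickname="alice") == {"nickname": "alice"}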

packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py

Lines changed: 23 additions & 22 deletions
@@ -92,32 +92,33 @@ class RunningDynamicServiceDetails(ServiceDetails):
         ignored_types=(cached_property,),
         json_schema_extra={
             "examples": [
+                # legacy
                 {
-                    "boot_type": "V0",
-                    "key": "simcore/services/dynamic/3dviewer",
-                    "version": "2.4.5",
-                    "user_id": 234,
-                    "project_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe",
-                    "uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa",
-                    "basepath": "/x/75c7f3f4-18f9-4678-8610-54a2ade78eaa",
-                    "host": "3dviewer_75c7f3f4-18f9-4678-8610-54a2ade78eaa",
-                    "internal_port": 8888,
-                    "state": "running",
-                    "message": "",
-                    "node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa",
+                    "service_key": "simcore/services/dynamic/raw-graphs",
+                    "service_version": "2.10.6",
+                    "user_id": 1,
+                    "project_id": "32fb4eb6-ab30-11ef-9ee4-0242ac140008",
+                    "service_uuid": "0cd049ba-cd6b-4a12-b416-a50c9bc8e7bb",
+                    "service_basepath": "/x/0cd049ba-cd6b-4a12-b416-a50c9bc8e7bb",
+                    "service_host": "raw-graphs_0cd049ba-cd6b-4a12-b416-a50c9bc8e7bb",
+                    "service_port": 4000,
+                    "published_port": None,
+                    "entry_point": "",
+                    "service_state": "running",
+                    "service_message": "",
                 },
+                # new style
                 {
+                    "service_key": "simcore/services/dynamic/jupyter-math",
+                    "service_version": "3.0.3",
+                    "user_id": 1,
+                    "project_id": "32fb4eb6-ab30-11ef-9ee4-0242ac140008",
+                    "service_uuid": "6e3cad3a-eb64-43de-b476-9ac3c413fd9c",
                     "boot_type": "V2",
-                    "key": "simcore/services/dynamic/dy-static-file-viewer-dynamic-sidecar",
-                    "version": "1.0.0",
-                    "user_id": 234,
-                    "project_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe",
-                    "uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa",
-                    "host": "dy-sidecar_75c7f3f4-18f9-4678-8610-54a2ade78eaa",
-                    "internal_port": 80,
-                    "state": "running",
-                    "message": "",
-                    "node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa",
+                    "service_host": "dy-sidecar_6e3cad3a-eb64-43de-b476-9ac3c413fd9c",
+                    "service_port": 8888,
+                    "service_state": "running",
+                    "service_message": "",
                 },
             ]
         },
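
One way to sanity-check the rewritten examples is to validate them against the model itself. A minimal sketch with pydantic v2, assuming each example is a valid aliased payload for RunningDynamicServiceDetails (this check is not part of the commit):

from models_library.api_schemas_directorv2.dynamic_services_service import (
    RunningDynamicServiceDetails,
)

# model_config["json_schema_extra"]["examples"] is the list edited in the diff above
for example in RunningDynamicServiceDetails.model_config["json_schema_extra"]["examples"]:
    details = RunningDynamicServiceDetails.model_validate(example)
    assert isinstance(details, RunningDynamicServiceDetails)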

packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py

Lines changed: 20 additions & 0 deletions
@@ -8,8 +8,10 @@
     DynamicServiceStop,
 )
 from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
+from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID
 from models_library.rabbitmq_basic_types import RPCMethodName
+from models_library.users import UserID
 from pydantic import NonNegativeInt, TypeAdapter
 from servicelib.logging_utils import log_decorator
 from servicelib.rabbitmq import RabbitMQRPCClient
@@ -29,6 +31,24 @@
 _RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName)


+@log_decorator(_logger, level=logging.DEBUG)
+async def list_tracked_dynamic_services(
+    rabbitmq_rpc_client: RabbitMQRPCClient,
+    *,
+    user_id: UserID | None = None,
+    project_id: ProjectID | None = None,
+) -> list[DynamicServiceGet]:
+    result = await rabbitmq_rpc_client.request(
+        DYNAMIC_SCHEDULER_RPC_NAMESPACE,
+        _RPC_METHOD_NAME_ADAPTER.validate_python("list_tracked_dynamic_services"),
+        user_id=user_id,
+        project_id=project_id,
+        timeout_s=_RPC_DEFAULT_TIMEOUT_S,
+    )
+    assert isinstance(result, list)  # nosec
+    return result
+
+
 @log_decorator(_logger, level=logging.DEBUG)
 async def get_service_status(
     rabbitmq_rpc_client: RabbitMQRPCClient, *, node_id: NodeID
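
Callers use the new RPC helper like the other functions in this module: pass a connected RabbitMQRPCClient plus optional user/project filters. A hypothetical usage sketch (the wrapper function and logger below are illustrative, not from the diff):

import logging

from models_library.users import UserID
from servicelib.rabbitmq import RabbitMQRPCClient
from servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler.services import (
    list_tracked_dynamic_services,
)

_logger = logging.getLogger(__name__)


async def log_tracked_services(rpc_client: RabbitMQRPCClient, user_id: UserID) -> None:
    # Both filters are optional; omitting them returns every tracked dynamic service.
    tracked = await list_tracked_dynamic_services(rpc_client, user_id=user_id)
    _logger.info("user %s has %d tracked dynamic services", user_id, len(tracked))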

requirements/constraints.txt

Lines changed: 1 addition & 18 deletions
@@ -43,32 +43,15 @@ pytest-asyncio<0.24
 #
 # Bugs
 #
-
+httpx!=0.28.0 # Waiting for fix in respx: https://github.com/lundberg/respx/pull/278



 #
 # Compatibility/coordination -----------------------------------------------------------------------------------------
 #
-
-
 pydantic>=2.10.0 # Avoids inter-version compatibility serialization errors as: _pickle.UnpicklingError: NEWOBJ class argument must be a type, not _AnnotatedAlias

-# constraint since https://github.com/MagicStack/uvloop/releases/tag/v0.15.0: drops support for 3.5/3.6 Feb.2021
-uvloop<0.15.0 ; python_version < '3.7'
-
-# All backports libraries add environ markers
-# NOTE: If >second dependency, this will annotate a marker in the compiled requirements file
-#
-async-exit-stack ; python_version < '3.7'
-async-generator ; python_version < '3.7'
-contextvars ; python_version < '3.7'
-dataclasses ; python_version < '3.7'
-importlib-metadata ; python_version < '3.8'
-importlib-resources ; python_version < '3.9'
-typing-extensions ; python_version < '3.7'
-zipp ; python_version < '3.7'
-


 #