Skip to content

Commit a63532a

Browse files
Merge remote-tracking branch 'upstream/master' into add-notifications-service
2 parents 158f780 + 3e05de4 commit a63532a

File tree

243 files changed

+2699
-1715
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

243 files changed

+2699
-1715
lines changed

.codecov.yml

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,9 +25,6 @@ component_management:
2525
branches:
2626
- "!master"
2727
individual_components:
28-
- component_id: api
29-
paths:
30-
- api/**
3128
- component_id: pkg_aws_library
3229
paths:
3330
- packages/aws-library/**
@@ -133,6 +130,7 @@ comment:
133130

134131

135132
ignore:
133+
- "api/tests"
136134
- "test_*.py"
137135
- "**/generated_models/*.py"
138136
- "**/generated_code/*.py"

.env-devel

Lines changed: 19 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -17,12 +17,12 @@ AGENT_VOLUMES_CLEANUP_S3_ENDPOINT=http://172.17.0.1:9001
1717
AGENT_VOLUMES_CLEANUP_S3_PROVIDER=MINIO
1818
AGENT_VOLUMES_CLEANUP_S3_REGION=us-east-1
1919
AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678
20-
AGENT_TRACING=null
20+
AGENT_TRACING={}
2121

2222
API_SERVER_DEV_FEATURES_ENABLED=0
2323
API_SERVER_LOGLEVEL=INFO
2424
API_SERVER_PROFILING=1
25-
API_SERVER_TRACING=null
25+
API_SERVER_TRACING={}
2626
TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT=25
2727

2828
AUTOSCALING_DASK=null
@@ -35,7 +35,7 @@ AUTOSCALING_LOGLEVEL=INFO
3535
AUTOSCALING_NODES_MONITORING=null
3636
AUTOSCALING_POLL_INTERVAL="00:00:10"
3737
AUTOSCALING_SSM_ACCESS=null
38-
AUTOSCALING_TRACING=null
38+
AUTOSCALING_TRACING={}
3939

4040
AWS_S3_CLI_S3=null
4141

@@ -47,7 +47,7 @@ CATALOG_PORT=8000
4747
CATALOG_PROFILING=1
4848
CATALOG_SERVICES_DEFAULT_RESOURCES='{"CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": {"limit": 2147483648, "reservation": 2147483648}}'
4949
CATALOG_SERVICES_DEFAULT_SPECIFICATIONS='{}'
50-
CATALOG_TRACING=null
50+
CATALOG_TRACING={}
5151

5252
CELERY_RESULT_EXPIRES=P7D
5353

@@ -63,7 +63,7 @@ CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION=5
6363
CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES=null
6464
CLUSTERS_KEEPER_TASK_INTERVAL=00:00:30
6565
CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES=null
66-
CLUSTERS_KEEPER_TRACING=null
66+
CLUSTERS_KEEPER_TRACING={}
6767

6868
DASK_SCHEDULER_HOST=dask-scheduler
6969
DASK_SCHEDULER_PORT=8786
@@ -83,7 +83,7 @@ DIRECTOR_PUBLISHED_HOST_NAME="127.0.0.1:9081"
8383
DIRECTOR_REGISTRY_CACHING_TTL=00:15:00
8484
DIRECTOR_REGISTRY_CACHING=True
8585
DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=null
86-
DIRECTOR_TRACING=null
86+
DIRECTOR_TRACING={}
8787

8888
DOCKER_API_PROXY_HOST=docker-api-proxy
8989
DOCKER_API_PROXY_PASSWORD=admin
@@ -98,11 +98,11 @@ EFS_GROUP_NAME=efs-group
9898
EFS_DNS_NAME=fs-xxx.efs.us-east-1.amazonaws.com
9999
EFS_MOUNTED_PATH=/tmp/efs
100100
EFS_PROJECT_SPECIFIC_DATA_DIRECTORY=project-specific-data
101-
EFS_GUARDIAN_TRACING=null
101+
EFS_GUARDIAN_TRACING={}
102102
EFS_DEFAULT_USER_SERVICE_SIZE_BYTES=10000
103103

104104
# DATCORE_ADAPTER
105-
DATCORE_ADAPTER_TRACING=null
105+
DATCORE_ADAPTER_TRACING={}
106106

107107
# DIRECTOR_V2 ----
108108
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}'
@@ -128,14 +128,14 @@ DYNAMIC_SIDECAR_LOG_LEVEL=DEBUG
128128
DYNAMIC_SIDECAR_PROMETHEUS_MONITORING_NETWORKS=[]
129129
DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS={}
130130
DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT=01:00:00
131-
DIRECTOR_V2_TRACING=null
131+
DIRECTOR_V2_TRACING={}
132132

133133
# DYNAMIC_SCHEDULER ----
134134
DYNAMIC_SCHEDULER_LOGLEVEL=INFO
135135
DYNAMIC_SCHEDULER_PROFILING=1
136136
DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER=0
137137
DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT=01:00:00
138-
DYNAMIC_SCHEDULER_TRACING=null
138+
DYNAMIC_SCHEDULER_TRACING={}
139139
DYNAMIC_SCHEDULER_UI_STORAGE_SECRET=adminadmin
140140

141141
FUNCTION_SERVICES_AUTHORS='{"UN": {"name": "Unknown", "email": "[email protected]", "affiliation": "unknown"}}'
@@ -158,13 +158,13 @@ INVITATIONS_PORT=8000
158158
INVITATIONS_SECRET_KEY='REPLACE_ME_with_result__Fernet_generate_key='
159159
INVITATIONS_SWAGGER_API_DOC_ENABLED=1
160160
INVITATIONS_USERNAME=admin
161-
INVITATIONS_TRACING=null
161+
INVITATIONS_TRACING={}
162162

163163
LOG_FORMAT_LOCAL_DEV_ENABLED=1
164-
LOG_FILTER_MAPPING='{}'
164+
LOG_FILTER_MAPPING='{"gunicorn.access":[" /v0/ ", " /v0/health "], "uvicorn.access":[" / "]}'
165165

166166
NOTIFICATIONS_LOGLEVEL=INFO
167-
NOTIFICATIONS_TRACING=null
167+
NOTIFICATIONS_TRACING={}
168168

169169
PAYMENTS_ACCESS_TOKEN_EXPIRE_MINUTES=30
170170
PAYMENTS_ACCESS_TOKEN_SECRET_KEY=2c0411810565e063309be1457009fb39ce023946f6a354e6935107b57676
@@ -186,7 +186,7 @@ PAYMENTS_STRIPE_API_SECRET='REPLACE_ME_with_api_secret'
186186
PAYMENTS_STRIPE_URL=https://api.stripe.com
187187
PAYMENTS_SWAGGER_API_DOC_ENABLED=1
188188
PAYMENTS_USERNAME=admin
189-
PAYMENTS_TRACING=null
189+
PAYMENTS_TRACING={}
190190

191191
POSTGRES_DB=simcoredb
192192
POSTGRES_ENDPOINT=postgres:5432
@@ -227,7 +227,7 @@ RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED=1
227227
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL=6
228228
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC=300
229229
RESOURCE_USAGE_TRACKER_S3=null
230-
RESOURCE_USAGE_TRACKER_TRACING=null
230+
RESOURCE_USAGE_TRACKER_TRACING={}
231231

232232
# NOTE: 172.17.0.1 is the docker0 interface, which redirect from inside a container onto the host network interface.
233233
R_CLONE_OPTION_BUFFER_SIZE=16M
@@ -259,7 +259,7 @@ STORAGE_HOST=storage
259259
STORAGE_LOGLEVEL=INFO
260260
STORAGE_PORT=8080
261261
STORAGE_PROFILING=1
262-
STORAGE_TRACING=null
262+
STORAGE_TRACING={}
263263
# STORAGE ----
264264

265265
SWARM_STACK_NAME=master-simcore
@@ -300,7 +300,7 @@ WB_GC_SOCKETIO=1
300300
WB_GC_STATICWEB=null
301301
WB_GC_STUDIES_DISPATCHER=null
302302
WB_GC_TAGS=0
303-
WB_GC_TRACING=null
303+
WB_GC_TRACING={}
304304
WB_GC_USERS={}
305305
WB_GC_WALLETS=0
306306

@@ -330,7 +330,7 @@ WB_DB_EL_STATICWEB=null
330330
WB_DB_EL_STORAGE=null
331331
WB_DB_EL_STUDIES_DISPATCHER=null
332332
WB_DB_EL_TAGS=0
333-
WB_DB_EL_TRACING=null
333+
WB_DB_EL_TRACING={}
334334
WB_DB_EL_USERS={}
335335
WB_DB_EL_WALLETS=0
336336

@@ -401,5 +401,5 @@ WEBSERVER_SOCKETIO=1
401401
WEBSERVER_STATICWEB={}
402402
WEBSERVER_STUDIES_DISPATCHER={}
403403
WEBSERVER_TAGS=1
404-
WEBSERVER_TRACING=null
404+
WEBSERVER_TRACING={}
405405
WEBSERVER_USERS={}

api/specs/web-server/_auth.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
from models_library.rest_error import EnvelopedError, Log
1616
from pydantic import BaseModel, Field, confloat
1717
from simcore_service_webserver._meta import API_VTAG
18-
from simcore_service_webserver.login._controller.rest.auth import (
18+
from simcore_service_webserver.login._controller.rest.auth_schemas import (
1919
LoginBody,
2020
LoginNextPage,
2121
LoginTwoFactorAuthBody,
@@ -30,7 +30,7 @@
3030
PhoneConfirmationBody,
3131
ResetPasswordConfirmation,
3232
)
33-
from simcore_service_webserver.login._controller.rest.registration import (
33+
from simcore_service_webserver.login._controller.rest.registration_schemas import (
3434
InvitationCheck,
3535
InvitationInfo,
3636
RegisterBody,

packages/aws-library/requirements/_base.txt

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -140,6 +140,7 @@ opentelemetry-api==1.34.1
140140
# opentelemetry-exporter-otlp-proto-http
141141
# opentelemetry-instrumentation
142142
# opentelemetry-instrumentation-aio-pika
143+
# opentelemetry-instrumentation-asyncpg
143144
# opentelemetry-instrumentation-botocore
144145
# opentelemetry-instrumentation-logging
145146
# opentelemetry-instrumentation-redis
@@ -160,12 +161,15 @@ opentelemetry-exporter-otlp-proto-http==1.34.1
160161
opentelemetry-instrumentation==0.55b1
161162
# via
162163
# opentelemetry-instrumentation-aio-pika
164+
# opentelemetry-instrumentation-asyncpg
163165
# opentelemetry-instrumentation-botocore
164166
# opentelemetry-instrumentation-logging
165167
# opentelemetry-instrumentation-redis
166168
# opentelemetry-instrumentation-requests
167169
opentelemetry-instrumentation-aio-pika==0.55b1
168170
# via -r requirements/../../../packages/service-library/requirements/_base.in
171+
opentelemetry-instrumentation-asyncpg==0.55b1
172+
# via -r requirements/../../../packages/service-library/requirements/_base.in
169173
opentelemetry-instrumentation-botocore==0.55b1
170174
# via -r requirements/_base.in
171175
opentelemetry-instrumentation-logging==0.55b1
@@ -189,6 +193,7 @@ opentelemetry-sdk==1.34.1
189193
opentelemetry-semantic-conventions==0.55b1
190194
# via
191195
# opentelemetry-instrumentation
196+
# opentelemetry-instrumentation-asyncpg
192197
# opentelemetry-instrumentation-botocore
193198
# opentelemetry-instrumentation-redis
194199
# opentelemetry-instrumentation-requests

packages/celery-library/requirements/_base.txt

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -132,6 +132,7 @@ opentelemetry-api==1.34.1
132132
# opentelemetry-exporter-otlp-proto-http
133133
# opentelemetry-instrumentation
134134
# opentelemetry-instrumentation-aio-pika
135+
# opentelemetry-instrumentation-asyncpg
135136
# opentelemetry-instrumentation-logging
136137
# opentelemetry-instrumentation-redis
137138
# opentelemetry-instrumentation-requests
@@ -150,11 +151,14 @@ opentelemetry-exporter-otlp-proto-http==1.34.1
150151
opentelemetry-instrumentation==0.55b1
151152
# via
152153
# opentelemetry-instrumentation-aio-pika
154+
# opentelemetry-instrumentation-asyncpg
153155
# opentelemetry-instrumentation-logging
154156
# opentelemetry-instrumentation-redis
155157
# opentelemetry-instrumentation-requests
156158
opentelemetry-instrumentation-aio-pika==0.55b1
157159
# via -r requirements/../../../packages/service-library/requirements/_base.in
160+
opentelemetry-instrumentation-asyncpg==0.55b1
161+
# via -r requirements/../../../packages/service-library/requirements/_base.in
158162
opentelemetry-instrumentation-logging==0.55b1
159163
# via -r requirements/../../../packages/service-library/requirements/_base.in
160164
opentelemetry-instrumentation-redis==0.55b1
@@ -174,6 +178,7 @@ opentelemetry-sdk==1.34.1
174178
opentelemetry-semantic-conventions==0.55b1
175179
# via
176180
# opentelemetry-instrumentation
181+
# opentelemetry-instrumentation-asyncpg
177182
# opentelemetry-instrumentation-redis
178183
# opentelemetry-instrumentation-requests
179184
# opentelemetry-sdk

packages/celery-library/src/celery_library/backends/_redis.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
from pydantic import ValidationError
88
from servicelib.celery.models import (
99
Task,
10-
TaskContext,
10+
TaskFilter,
1111
TaskID,
1212
TaskMetadata,
1313
TaskUUID,
@@ -82,10 +82,10 @@ async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None:
8282
)
8383
return None
8484

85-
async def list_tasks(self, context: TaskContext) -> list[Task]:
85+
async def list_tasks(self, task_filter: TaskFilter) -> list[Task]:
8686
search_key = (
8787
_CELERY_TASK_INFO_PREFIX
88-
+ build_task_id_prefix(context)
88+
+ build_task_id_prefix(task_filter)
8989
+ _CELERY_TASK_ID_KEY_SEPARATOR
9090
)
9191
search_key_len = len(search_key)

packages/celery-library/src/celery_library/rpc/_async_jobs.py

Lines changed: 19 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,9 @@
44

55
from celery.exceptions import CeleryError # type: ignore[import-untyped]
66
from models_library.api_schemas_rpc_async_jobs.async_jobs import (
7+
AsyncJobFilter,
78
AsyncJobGet,
89
AsyncJobId,
9-
AsyncJobNameData,
1010
AsyncJobResult,
1111
AsyncJobStatus,
1212
)
@@ -16,7 +16,7 @@
1616
JobNotDoneError,
1717
JobSchedulerError,
1818
)
19-
from servicelib.celery.models import TaskState
19+
from servicelib.celery.models import TaskFilter, TaskState
2020
from servicelib.celery.task_manager import TaskManager
2121
from servicelib.logging_utils import log_catch
2222
from servicelib.rabbitmq import RPCRouter
@@ -32,13 +32,14 @@
3232

3333
@router.expose(reraise_if_error_type=(JobSchedulerError,))
3434
async def cancel(
35-
task_manager: TaskManager, job_id: AsyncJobId, job_id_data: AsyncJobNameData
35+
task_manager: TaskManager, job_id: AsyncJobId, job_filter: AsyncJobFilter
3636
):
3737
assert task_manager # nosec
38-
assert job_id_data # nosec
38+
assert job_filter # nosec
39+
task_filter = TaskFilter.model_validate(job_filter.model_dump())
3940
try:
4041
await task_manager.cancel_task(
41-
context=job_id_data.model_dump(),
42+
task_filter=task_filter,
4243
task_uuid=job_id,
4344
)
4445
except CeleryError as exc:
@@ -47,14 +48,15 @@ async def cancel(
4748

4849
@router.expose(reraise_if_error_type=(JobSchedulerError,))
4950
async def status(
50-
task_manager: TaskManager, job_id: AsyncJobId, job_id_data: AsyncJobNameData
51+
task_manager: TaskManager, job_id: AsyncJobId, job_filter: AsyncJobFilter
5152
) -> AsyncJobStatus:
5253
assert task_manager # nosec
53-
assert job_id_data # nosec
54+
assert job_filter # nosec
5455

56+
task_filter = TaskFilter.model_validate(job_filter.model_dump())
5557
try:
5658
task_status = await task_manager.get_task_status(
57-
context=job_id_data.model_dump(),
59+
task_filter=task_filter,
5860
task_uuid=job_id,
5961
)
6062
except CeleryError as exc:
@@ -76,21 +78,23 @@ async def status(
7678
)
7779
)
7880
async def result(
79-
task_manager: TaskManager, job_id: AsyncJobId, job_id_data: AsyncJobNameData
81+
task_manager: TaskManager, job_id: AsyncJobId, job_filter: AsyncJobFilter
8082
) -> AsyncJobResult:
8183
assert task_manager # nosec
8284
assert job_id # nosec
83-
assert job_id_data # nosec
85+
assert job_filter # nosec
86+
87+
task_filter = TaskFilter.model_validate(job_filter.model_dump())
8488

8589
try:
8690
_status = await task_manager.get_task_status(
87-
context=job_id_data.model_dump(),
91+
task_filter=task_filter,
8892
task_uuid=job_id,
8993
)
9094
if not _status.is_done:
9195
raise JobNotDoneError(job_id=job_id)
9296
_result = await task_manager.get_task_result(
93-
context=job_id_data.model_dump(),
97+
task_filter=task_filter,
9498
task_uuid=job_id,
9599
)
96100
except CeleryError as exc:
@@ -123,13 +127,14 @@ async def result(
123127

124128
@router.expose(reraise_if_error_type=(JobSchedulerError,))
125129
async def list_jobs(
126-
task_manager: TaskManager, filter_: str, job_id_data: AsyncJobNameData
130+
task_manager: TaskManager, filter_: str, job_filter: AsyncJobFilter
127131
) -> list[AsyncJobGet]:
128132
_ = filter_
129133
assert task_manager # nosec
134+
task_filter = TaskFilter.model_validate(job_filter.model_dump())
130135
try:
131136
tasks = await task_manager.list_tasks(
132-
context=job_id_data.model_dump(),
137+
task_filter=task_filter,
133138
)
134139
except CeleryError as exc:
135140
raise JobSchedulerError(exc=f"{exc}") from exc

0 commit comments

Comments (0)