
Commit 92dc0b0

Merge branch 'master' into upgrade-api-server-dependencies
2 parents: b5239b7 + bf06671

9 files changed: +158 -81 lines changed

.env-devel

Lines changed: 15 additions & 15 deletions
@@ -17,12 +17,12 @@ AGENT_VOLUMES_CLEANUP_S3_ENDPOINT=http://172.17.0.1:9001
 AGENT_VOLUMES_CLEANUP_S3_PROVIDER=MINIO
 AGENT_VOLUMES_CLEANUP_S3_REGION=us-east-1
 AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678
-AGENT_TRACING={}
+AGENT_TRACING=null
 
 API_SERVER_DEV_FEATURES_ENABLED=0
 API_SERVER_LOGLEVEL=INFO
 API_SERVER_PROFILING=1
-API_SERVER_TRACING={}
+API_SERVER_TRACING=null
 TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT=25
 
 AUTOSCALING_DASK=null
@@ -35,7 +35,7 @@ AUTOSCALING_LOGLEVEL=INFO
 AUTOSCALING_NODES_MONITORING=null
 AUTOSCALING_POLL_INTERVAL="00:00:10"
 AUTOSCALING_SSM_ACCESS=null
-AUTOSCALING_TRACING={}
+AUTOSCALING_TRACING=null
 
 AWS_S3_CLI_S3=null
 
@@ -47,7 +47,7 @@ CATALOG_PORT=8000
 CATALOG_PROFILING=1
 CATALOG_SERVICES_DEFAULT_RESOURCES='{"CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": {"limit": 2147483648, "reservation": 2147483648}}'
 CATALOG_SERVICES_DEFAULT_SPECIFICATIONS='{}'
-CATALOG_TRACING={}
+CATALOG_TRACING=null
 
 CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}'
 CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG=master-github-latest
@@ -61,7 +61,7 @@ CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION=5
 CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES=null
 CLUSTERS_KEEPER_TASK_INTERVAL=00:00:30
 CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES=null
-CLUSTERS_KEEPER_TRACING={}
+CLUSTERS_KEEPER_TRACING=null
 
 DASK_SCHEDULER_HOST=dask-scheduler
 DASK_SCHEDULER_PORT=8786
@@ -81,7 +81,7 @@ DIRECTOR_PUBLISHED_HOST_NAME="127.0.0.1:9081"
 DIRECTOR_REGISTRY_CACHING_TTL=00:15:00
 DIRECTOR_REGISTRY_CACHING=True
 DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=null
-DIRECTOR_TRACING={}
+DIRECTOR_TRACING=null
 
 EFS_USER_ID=8006
 EFS_USER_NAME=efs
@@ -90,11 +90,11 @@ EFS_GROUP_NAME=efs-group
 EFS_DNS_NAME=fs-xxx.efs.us-east-1.amazonaws.com
 EFS_MOUNTED_PATH=/tmp/efs
 EFS_PROJECT_SPECIFIC_DATA_DIRECTORY=project-specific-data
-EFS_GUARDIAN_TRACING={}
+EFS_GUARDIAN_TRACING=null
 EFS_DEFAULT_USER_SERVICE_SIZE_BYTES=10000
 
 # DATCORE_ADAPTER
-DATCORE_ADAPTER_TRACING={}
+DATCORE_ADAPTER_TRACING=null
 
 # DIRECTOR_V2 ----
 COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}'
@@ -121,14 +121,14 @@ DYNAMIC_SIDECAR_LOG_LEVEL=DEBUG
 DYNAMIC_SIDECAR_PROMETHEUS_MONITORING_NETWORKS=[]
 DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS={}
 DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT=01:00:00
-DIRECTOR_V2_TRACING={}
+DIRECTOR_V2_TRACING=null
 
 # DYNAMIC_SCHEDULER ----
 DYNAMIC_SCHEDULER_LOGLEVEL=DEBUG
 DYNAMIC_SCHEDULER_PROFILING=1
 DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER=0
 DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT=01:00:00
-DYNAMIC_SCHEDULER_TRACING={}
+DYNAMIC_SCHEDULER_TRACING=null
 DYNAMIC_SCHEDULER_UI_STORAGE_SECRET=adminadmin
 
 FUNCTION_SERVICES_AUTHORS='{"UN": {"name": "Unknown", "email": "[email protected]", "affiliation": "unknown"}}'
@@ -143,7 +143,7 @@ INVITATIONS_PORT=8000
 INVITATIONS_SECRET_KEY='REPLACE_ME_with_result__Fernet_generate_key='
 INVITATIONS_SWAGGER_API_DOC_ENABLED=1
 INVITATIONS_USERNAME=admin
-INVITATIONS_TRACING={}
+INVITATIONS_TRACING=null
 
 LOG_FORMAT_LOCAL_DEV_ENABLED=1
 LOG_FILTER_MAPPING='{}'
@@ -168,7 +168,7 @@ PAYMENTS_STRIPE_API_SECRET='REPLACE_ME_with_api_secret'
 PAYMENTS_STRIPE_URL=https://api.stripe.com
 PAYMENTS_SWAGGER_API_DOC_ENABLED=1
 PAYMENTS_USERNAME=admin
-PAYMENTS_TRACING={}
+PAYMENTS_TRACING=null
 
 POSTGRES_DB=simcoredb
 POSTGRES_ENDPOINT=postgres:5432
@@ -209,7 +209,7 @@ RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED=1
 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL=6
 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC=300
 RESOURCE_USAGE_TRACKER_S3=null
-RESOURCE_USAGE_TRACKER_TRACING={}
+RESOURCE_USAGE_TRACKER_TRACING=null
 
 # NOTE: 172.17.0.1 is the docker0 interface, which redirect from inside a container onto the host network interface.
 R_CLONE_OPTION_BUFFER_SIZE=16M
@@ -243,7 +243,7 @@ STORAGE_HOST=storage
 STORAGE_LOGLEVEL=INFO
 STORAGE_PORT=8080
 STORAGE_PROFILING=1
-STORAGE_TRACING={}
+STORAGE_TRACING=null
 # STORAGE ----
 
 SWARM_STACK_NAME=master-simcore
@@ -389,6 +389,6 @@ WEBSERVER_SOCKETIO=1
 WEBSERVER_STATICWEB={}
 WEBSERVER_STUDIES_DISPATCHER={}
 WEBSERVER_TAGS=1
-WEBSERVER_TRACING={}
+WEBSERVER_TRACING=null
 WEBSERVER_USERS={}
 WEBSERVER_VERSION_CONTROL=1
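Every `*_TRACING` variable in this file moves from `{}` to `null`. In pydantic-style settings (the pattern these services follow), the env value is JSON-decoded into an optional nested model, so `null` disables the tracing block while `{}` enables it with defaults. A minimal sketch of that behaviour, assuming pydantic v2 with pydantic-settings; the class and env names below are made up for illustration and are not the actual settings classes:

```python
# Illustrative only: why `null` (not `{}`) disables an optional tracing block
# when the env value is JSON-decoded into a nested settings model.
import os

from pydantic import BaseModel
from pydantic_settings import BaseSettings


class TracingSettings(BaseModel):
    # stand-in default for the real tracing options
    TRACING_COLLECTOR_ENDPOINT: str = "http://opentelemetry-collector:4318"


class ServiceSettings(BaseSettings):
    MY_SERVICE_TRACING: TracingSettings | None = None  # None -> tracing disabled


os.environ["MY_SERVICE_TRACING"] = "null"    # value now used in .env-devel
print(ServiceSettings().MY_SERVICE_TRACING)  # expected: None (tracing off)

os.environ["MY_SERVICE_TRACING"] = "{}"      # previous value
print(ServiceSettings().MY_SERVICE_TRACING)  # expected: TracingSettings(...) with defaults
```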

.github/workflows/ci-testing-deploy.yml

Lines changed: 8 additions & 2 deletions
@@ -2664,11 +2664,14 @@ jobs:
 
   system-api-specs:
     needs: [changes]
-    if: ${{ needs.changes.outputs.anything-py == 'true' && github.event_name == 'push' && github.event.pull_request != null }}
+    if: ${{ needs.changes.outputs.anything-py == 'true' || github.event_name == 'push' }}
     timeout-minutes: 10
     name: "[sys] check api-specs are up to date"
     runs-on: ubuntu-latest
     steps:
+      - name: Ensure job passes if not PR # ensure pass so upstream jobs which depend on this will run (dockerhub deployment)
+        if: ${{ github.event.pull_request == null }}
+        run: echo "::notice Passing job because not in PR"; exit 0
      - name: setup python environment
        uses: actions/setup-python@v5
        with:
@@ -2690,11 +2693,14 @@ jobs:
 
   system-backwards-compatibility:
     needs: [changes, system-api-specs]
-    if: ${{ needs.changes.outputs.anything-py == 'true' && github.event_name == 'push' && github.event.pull_request != null }}
+    if: ${{ needs.changes.outputs.anything-py == 'true' || github.event_name == 'push' }}
    timeout-minutes: 10
     name: "[sys] api-server backwards compatibility"
     runs-on: ubuntu-latest
     steps:
+      - name: Ensure job passes if not PR # ensure pass so upstream jobs which depend on this will run (dockerhub deployment)
+        if: ${{ github.event.pull_request == null }}
+        run: echo "::notice Passing job because not in PR"; exit 0
      - name: setup python environment
        uses: actions/setup-python@v5
        with:
Lines changed: 15 additions & 5 deletions
@@ -1,16 +1,26 @@
+import logging
+
 from fastapi import APIRouter, FastAPI
+from servicelib.logging_utils import log_context
 
 from ..._meta import API_VTAG
 from . import _health, _meta, _resource_tracker
 
+_logger = logging.getLogger(__name__)
+
 
 def setup_api_routes(app: FastAPI):
     """
     Composes resources/sub-resources routers
     """
-    app.include_router(_health.router)
+    with log_context(
+        _logger,
+        logging.INFO,
+        msg="RUT setup_api_routes",
+    ):
+        app.include_router(_health.router)
 
-    api_router = APIRouter(prefix=f"/{API_VTAG}")
-    api_router.include_router(_meta.router, tags=["meta"])
-    api_router.include_router(_resource_tracker.router)
-    app.include_router(api_router)
+        api_router = APIRouter(prefix=f"/{API_VTAG}")
+        api_router.include_router(_meta.router, tags=["meta"])
+        api_router.include_router(_resource_tracker.router)
+        app.include_router(api_router)
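This hunk (and the remaining Python hunks below) wraps existing setup code in `log_context` from `servicelib.logging_utils`. As a rough, hedged approximation of what such a helper does — the actual servicelib implementation may use different message wording and carry extra options — a minimal sketch:

```python
# Hedged sketch of a log_context-style helper: it brackets a block of code with
# a "starting" and a "finished" log line at the requested level.
import logging
from collections.abc import Iterator
from contextlib import contextmanager

logging.basicConfig(level=logging.INFO)
_logger = logging.getLogger(__name__)


@contextmanager
def log_context(logger: logging.Logger, level: int, msg: str) -> Iterator[None]:
    logger.log(level, "Starting %s ...", msg)
    try:
        yield
    finally:
        logger.log(level, "Finished %s", msg)


with log_context(_logger, logging.INFO, msg="RUT setup_api_routes"):
    pass  # router registration would go here
```

The net effect of the diffs is therefore purely observability: each setup/startup/shutdown step now emits paired start/finish log lines without changing behaviour.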
Lines changed: 16 additions & 5 deletions
@@ -1,23 +1,34 @@
+import logging
+
 from fastapi import FastAPI
 from models_library.api_schemas_resource_usage_tracker import (
     RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
 )
+from servicelib.logging_utils import log_context
 from servicelib.rabbitmq import RPCRouter
 
 from ...services.modules.rabbitmq import get_rabbitmq_rpc_server
 from . import _resource_tracker
 
+_logger = logging.getLogger(__name__)
+
+
 ROUTERS: list[RPCRouter] = [
     _resource_tracker.router,
 ]
 
 
 def setup_rpc_api_routes(app: FastAPI) -> None:
     async def startup() -> None:
-        rpc_server = get_rabbitmq_rpc_server(app)
-        for router in ROUTERS:
-            await rpc_server.register_router(
-                router, RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, app
-            )
+        with log_context(
+            _logger,
+            logging.INFO,
+            msg="RUT startup RPC API Routes",
+        ):
+            rpc_server = get_rabbitmq_rpc_server(app)
+            for router in ROUTERS:
+                await rpc_server.register_router(
+                    router, RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, app
+                )
 
     app.add_event_handler("startup", startup)
Lines changed: 17 additions & 2 deletions
@@ -1,13 +1,28 @@
+import logging
+
 from fastapi import FastAPI
 from servicelib.fastapi.db_asyncpg_engine import close_db_connection, connect_to_db
+from servicelib.logging_utils import log_context
+
+_logger = logging.getLogger(__name__)
 
 
 def setup(app: FastAPI):
     async def on_startup() -> None:
-        await connect_to_db(app, app.state.settings.RESOURCE_USAGE_TRACKER_POSTGRES)
+        with log_context(
+            _logger,
+            logging.INFO,
+            msg="RUT startup DB",
+        ):
+            await connect_to_db(app, app.state.settings.RESOURCE_USAGE_TRACKER_POSTGRES)
 
     async def on_shutdown() -> None:
-        await close_db_connection(app)
+        with log_context(
+            _logger,
+            logging.INFO,
+            msg="RUT shutdown DB",
+        ):
+            await close_db_connection(app)
 
     app.add_event_handler("startup", on_startup)
     app.add_event_handler("shutdown", on_shutdown)
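The module diffs here and below all follow the same FastAPI pattern: `setup(app)` only registers async startup/shutdown handlers, and the actual connect/close work runs when the framework fires those events. A self-contained sketch of that pattern, using only standard FastAPI APIs (handler bodies and state names are illustrative; running it needs `fastapi` plus `httpx` for the test client):

```python
# Sketch of the setup()/event-handler pattern used by the modules in this commit.
import logging

from fastapi import FastAPI
from fastapi.testclient import TestClient  # requires httpx

logging.basicConfig(level=logging.INFO)
_logger = logging.getLogger(__name__)


def setup(app: FastAPI) -> None:
    async def on_startup() -> None:
        _logger.info("connecting resources ...")
        app.state.ready = True  # stand-in for e.g. a DB engine kept on app.state

    async def on_shutdown() -> None:
        _logger.info("releasing resources ...")
        app.state.ready = False

    # nothing connects here; handlers run only when the app starts/stops
    app.add_event_handler("startup", on_startup)
    app.add_event_handler("shutdown", on_shutdown)


app = FastAPI()
setup(app)

# TestClient as a context manager fires startup on enter and shutdown on exit
with TestClient(app):
    assert app.state.ready is True
assert app.state.ready is False
```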

services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/rabbitmq.py

Lines changed: 30 additions & 19 deletions
@@ -3,6 +3,7 @@
 
 from fastapi import FastAPI
 from fastapi.requests import Request
+from servicelib.logging_utils import log_context
 from servicelib.rabbitmq import (
     RabbitMQClient,
     RabbitMQRPCClient,
@@ -12,32 +13,42 @@
 
 from ...exceptions.errors import ConfigurationError
 
-logger = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)
 
 
 def setup(app: FastAPI) -> None:
     async def on_startup() -> None:
-        app.state.rabbitmq_client = None
-        settings: RabbitSettings | None = (
-            app.state.settings.RESOURCE_USAGE_TRACKER_RABBITMQ
-        )
-        if not settings:
-            raise ConfigurationError(
-                msg="Rabbit MQ client is de-activated in the settings"
+        with log_context(
+            _logger,
+            logging.INFO,
+            msg="RUT startup Rabbitmq",
+        ):
+            app.state.rabbitmq_client = None
+            settings: RabbitSettings | None = (
+                app.state.settings.RESOURCE_USAGE_TRACKER_RABBITMQ
+            )
+            if not settings:
+                raise ConfigurationError(
+                    msg="Rabbit MQ client is de-activated in the settings"
+                )
+            await wait_till_rabbitmq_responsive(settings.dsn)
+            app.state.rabbitmq_client = RabbitMQClient(
+                client_name="resource-usage-tracker", settings=settings
+            )
+            app.state.rabbitmq_rpc_server = await RabbitMQRPCClient.create(
+                client_name="resource_usage_tracker_rpc_server", settings=settings
             )
-        await wait_till_rabbitmq_responsive(settings.dsn)
-        app.state.rabbitmq_client = RabbitMQClient(
-            client_name="resource-usage-tracker", settings=settings
-        )
-        app.state.rabbitmq_rpc_server = await RabbitMQRPCClient.create(
-            client_name="resource_usage_tracker_rpc_server", settings=settings
-        )
 
     async def on_shutdown() -> None:
-        if app.state.rabbitmq_client:
-            await app.state.rabbitmq_client.close()
-        if app.state.rabbitmq_rpc_server:
-            await app.state.rabbitmq_rpc_server.close()
+        with log_context(
+            _logger,
+            logging.INFO,
+            msg="RUT shutdown Rabbitmq",
+        ):
+            if app.state.rabbitmq_client:
+                await app.state.rabbitmq_client.close()
+            if app.state.rabbitmq_rpc_server:
+                await app.state.rabbitmq_rpc_server.close()
 
     app.add_event_handler("startup", on_startup)
     app.add_event_handler("shutdown", on_shutdown)

services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py

Lines changed: 22 additions & 11 deletions
@@ -2,28 +2,39 @@
 from typing import cast
 
 from fastapi import FastAPI
+from servicelib.logging_utils import log_context
 from servicelib.redis import RedisClientSDK
 from settings_library.redis import RedisDatabase, RedisSettings
 
 from ..._meta import APP_NAME
 
-logger = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)
 
 
 def setup(app: FastAPI) -> None:
     async def on_startup() -> None:
-        app.state.redis_client_sdk = None
-        settings: RedisSettings = app.state.settings.RESOURCE_USAGE_TRACKER_REDIS
-        redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
-        app.state.redis_client_sdk = client = RedisClientSDK(
-            redis_locks_dsn, client_name=APP_NAME
-        )
-        await client.setup()
+        with log_context(
+            _logger,
+            logging.INFO,
+            msg="RUT startup Redis",
+        ):
+            app.state.redis_client_sdk = None
+            settings: RedisSettings = app.state.settings.RESOURCE_USAGE_TRACKER_REDIS
+            redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
+            app.state.redis_client_sdk = client = RedisClientSDK(
+                redis_locks_dsn, client_name=APP_NAME
+            )
+            await client.setup()
 
     async def on_shutdown() -> None:
-        redis_client_sdk: None | RedisClientSDK = app.state.redis_client_sdk
-        if redis_client_sdk:
-            await redis_client_sdk.shutdown()
+        with log_context(
+            _logger,
+            logging.INFO,
+            msg="RUT shutdown Redis",
+        ):
+            redis_client_sdk: None | RedisClientSDK = app.state.redis_client_sdk
+            if redis_client_sdk:
+                await redis_client_sdk.shutdown()
 
     app.add_event_handler("startup", on_startup)
     app.add_event_handler("shutdown", on_shutdown)
