Skip to content

Commit abd3323

Browse files
committed
✨ Refactor: Replace json.dumps with json_dumps for consistency across modules
1 parent d72188e commit abd3323

File tree

18 files changed

+53
-65
lines changed

18 files changed

+53
-65
lines changed

packages/models-library/src/models_library/utils/nodes.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
import hashlib
2-
import json
32
import logging
43
from collections.abc import Callable, Coroutine
54
from copy import deepcopy
65
from typing import Any
76

7+
from common_library.json_serialization import json_dumps
88
from pydantic import BaseModel, TypeAdapter
99

1010
from ..projects import Project
@@ -67,6 +67,6 @@ async def compute_node_hash(
6767
# now create the hash
6868
# WARNING: Here we cannot change to json_serialization.json_dumps because it would create a different dump string and therefore a different hash
6969
# NOTE that these hashes might have been already stored elsewhere
70-
block_string = json.dumps(resolved_payload, sort_keys=True).encode("utf-8")
70+
block_string = json_dumps(resolved_payload, sort_keys=True).encode("utf-8")
(NOTE(review): this change contradicts the WARNING two lines above — if json_dumps does not produce byte-identical output to json.dumps with sort_keys=True, previously stored node hashes will no longer match; confirm output equivalence before merging.)
7171
raw_hash = hashlib.sha256(block_string)
7272
return raw_hash.hexdigest()

packages/postgres-database/src/simcore_postgres_database/models/products.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,10 @@
55
- Every product has a front-end with exactly the same name
66
"""
77

8-
import json
98
from typing import Literal
109

1110
import sqlalchemy as sa
11+
from common_library.json_serialization import json_dumps
1212
from sqlalchemy.dialects.postgresql import JSONB
1313
from sqlalchemy.sql import func
1414
from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict
@@ -114,7 +114,7 @@ class ProductLoginSettingsDict(TypedDict, total=False):
114114

115115
# NOTE: defaults affects migration!!
116116
LOGIN_SETTINGS_DEFAULT = ProductLoginSettingsDict() # = {}
117-
_LOGIN_SETTINGS_SERVER_DEFAULT = json.dumps(LOGIN_SETTINGS_DEFAULT)
117+
_LOGIN_SETTINGS_SERVER_DEFAULT = json_dumps(LOGIN_SETTINGS_DEFAULT)
118118

119119

120120
#

packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import json
21
import logging
32
import os
43
import socket
@@ -9,7 +8,7 @@
98
import tenacity
109
from aiopg.sa.engine import Engine
1110
from aiopg.sa.result import RowProxy
12-
from common_library.json_serialization import json_loads
11+
from common_library.json_serialization import json_dumps, json_loads
1312
from models_library.projects import ProjectID
1413
from models_library.users import UserID
1514
from servicelib.common_aiopg_utils import DataSourceName, create_pg_engine
@@ -145,7 +144,7 @@ async def get_ports_configuration_from_node_uuid(
145144
self._db_engine
146145
) as engine, engine.acquire() as connection:
147146
node: RowProxy = await _get_node_from_db(project_id, node_uuid, connection)
148-
node_json_config = json.dumps(
147+
node_json_config = json_dumps(
149148
{
150149
"schema": node.schema,
151150
"inputs": node.inputs,

services/autoscaling/src/simcore_service_autoscaling/utils/redis.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
1-
import json
2-
1+
from common_library.json_serialization import json_dumps
32
from fastapi import FastAPI
43

54
from ..core.settings import ApplicationSettings
@@ -14,7 +13,7 @@ def create_lock_key_and_value(app: FastAPI) -> tuple[str, str]:
1413
"dynamic",
1514
*app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS,
1615
]
17-
lock_value = json.dumps(
16+
lock_value = json_dumps(
1817
{
1918
"node_labels": app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS
2019
}
@@ -24,7 +23,7 @@ def create_lock_key_and_value(app: FastAPI) -> tuple[str, str]:
2423
"computational",
2524
f"{app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL}",
2625
]
27-
lock_value = json.dumps(
26+
lock_value = json_dumps(
2827
{"scheduler_url": f"{app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL}"}
2928
)
3029
lock_key = ":".join(f"{k}" for k in lock_key_parts)

services/autoscaling/src/simcore_service_autoscaling/utils/utils_ec2.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,12 @@
1-
""" Free helper functions for AWS API
1+
"""Free helper functions for AWS API"""
22

3-
"""
4-
5-
import json
63
import logging
74
from collections import OrderedDict
85
from collections.abc import Callable
96
from textwrap import dedent
107

118
from aws_library.ec2 import AWSTagKey, AWSTagValue, EC2InstanceType, EC2Tags, Resources
9+
from common_library.json_serialization import json_dumps
1210

1311
from .._meta import VERSION
1412
from ..core.errors import ConfigurationError, TaskBestFittingInstanceNotFoundError
@@ -23,12 +21,12 @@ def get_ec2_tags_dynamic(app_settings: ApplicationSettings) -> EC2Tags:
2321
return {
2422
AWSTagKey("io.simcore.autoscaling.version"): AWSTagValue(f"{VERSION}"),
2523
AWSTagKey("io.simcore.autoscaling.monitored_nodes_labels"): AWSTagValue(
26-
json.dumps(
24+
json_dumps(
2725
app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS
2826
)
2927
),
3028
AWSTagKey("io.simcore.autoscaling.monitored_services_labels"): AWSTagValue(
31-
json.dumps(
29+
json_dumps(
3230
app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_SERVICE_LABELS
3331
)
3432
),

services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
11
import base64
22
import datetime
33
import functools
4-
import json
54
from pathlib import Path
65
from typing import Any, Final
76

87
import arrow
98
import yaml
109
from aws_library.ec2 import EC2InstanceBootSpecific, EC2InstanceData, EC2Tags
1110
from aws_library.ec2._models import CommandStr
11+
from common_library.json_serialization import json_dumps
1212
from common_library.serialization import model_dump_with_secrets
1313
from fastapi.encoders import jsonable_encoder
1414
from models_library.api_schemas_clusters_keeper.clusters import (
@@ -80,7 +80,7 @@ def _convert_to_env_list(entries: list[Any]) -> str:
8080
return f"[{entries_as_str}]"
8181

8282
def _convert_to_env_dict(entries: dict[str, Any]) -> str:
83-
return f"'{json.dumps(jsonable_encoder(entries))}'"
83+
return f"'{json_dumps(jsonable_encoder(entries))}'"
8484

8585
assert app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES # nosec
8686

services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import asyncio
2-
import json
32
import logging
43
import os
54
import socket
@@ -12,6 +11,7 @@
1211
from uuid import uuid4
1312

1413
from aiodocker import Docker
14+
from common_library.json_serialization import json_dumps
1515
from dask_task_models_library.container_tasks.docker import DockerBasicAuth
1616
from dask_task_models_library.container_tasks.errors import ServiceRuntimeError
1717
from dask_task_models_library.container_tasks.io import FileUrl, TaskOutputData
@@ -95,7 +95,7 @@ async def _write_input_data(
9595
# NOTE: temporary solution until new version is created
9696
for task in download_tasks:
9797
await task
98-
input_data_file.write_text(json.dumps(local_input_data_file))
98+
input_data_file.write_text(json_dumps(local_input_data_file))
9999

100100
await self._publish_sidecar_log("All the input data were downloaded.")
101101

services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import json
21
import logging
32
from collections.abc import Mapping
43
from datetime import datetime
@@ -10,6 +9,7 @@
109

1110
import arrow
1211
from common_library.error_codes import ErrorCodeStr
12+
from common_library.json_serialization import json_dumps
1313
from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceCreate
1414
from models_library.api_schemas_directorv2.dynamic_services_service import (
1515
CommonServiceDetails,
@@ -504,7 +504,7 @@ def from_http_request(
504504
"product_name": service.product_name,
505505
"paths_mapping": simcore_service_labels.paths_mapping,
506506
"callbacks_mapping": simcore_service_labels.callbacks_mapping,
507-
"compose_spec": json.dumps(simcore_service_labels.compose_spec),
507+
"compose_spec": json_dumps(simcore_service_labels.compose_spec),
508508
"container_http_entry": simcore_service_labels.container_http_entry,
509509
"restart_policy": simcore_service_labels.restart_policy,
510510
"dynamic_sidecar_network_name": names_helper.dynamic_sidecar_network_name,
@@ -541,7 +541,7 @@ def as_label_data(self) -> str:
541541
# compose_spec needs to be json encoded before encoding it to json
542542
# and storing it in the label
543543
return self.model_copy(
544-
update={"compose_spec": json.dumps(self.compose_spec)},
544+
update={"compose_spec": json_dumps(self.compose_spec)},
545545
deep=True,
546546
).model_dump_json()
547547

services/director-v2/src/simcore_service_director_v2/modules/dask_client.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
"""
1010

1111
import asyncio
12-
import json
1312
import logging
1413
import traceback
1514
from collections.abc import Callable
@@ -21,6 +20,7 @@
2120
import dask.typing
2221
import distributed
2322
from aiohttp import ClientResponseError
23+
from common_library.json_serialization import json_dumps
2424
from dask_task_models_library.container_tasks.docker import DockerBasicAuth
2525
from dask_task_models_library.container_tasks.errors import TaskCancelledError
2626
from dask_task_models_library.container_tasks.io import (
@@ -162,11 +162,11 @@ async def create(
162162
_logger.info(
163163
"Connection to %s succeeded [%s]",
164164
f"dask-scheduler at {endpoint}",
165-
json.dumps(attempt.retry_state.retry_object.statistics),
165+
json_dumps(attempt.retry_state.retry_object.statistics),
166166
)
167167
_logger.info(
168168
"Scheduler info:\n%s",
169-
json.dumps(backend.client.scheduler_info(), indent=2),
169+
json_dumps(backend.client.scheduler_info(), indent=2),
170170
)
171171
return instance
172172
# this is to satisfy pylance
@@ -439,14 +439,14 @@ async def get_tasks_status(self, job_ids: list[str]) -> list[DaskClientTaskState
439439
def _get_pipeline_statuses(
440440
dask_scheduler: distributed.Scheduler,
441441
) -> dict[dask.typing.Key, DaskSchedulerTaskState | None]:
442-
statuses: dict[
443-
dask.typing.Key, DaskSchedulerTaskState | None
444-
] = dask_scheduler.get_task_status(keys=job_ids)
442+
statuses: dict[dask.typing.Key, DaskSchedulerTaskState | None] = (
443+
dask_scheduler.get_task_status(keys=job_ids)
444+
)
445445
return statuses
446446

447-
task_statuses: dict[
448-
dask.typing.Key, DaskSchedulerTaskState | None
449-
] = await self.backend.client.run_on_scheduler(_get_pipeline_statuses)
447+
task_statuses: dict[dask.typing.Key, DaskSchedulerTaskState | None] = (
448+
await self.backend.client.run_on_scheduler(_get_pipeline_statuses)
449+
)
450450
assert isinstance(task_statuses, dict) # nosec
451451

452452
_logger.debug("found dask task statuses: %s", f"{task_statuses=}")
@@ -578,10 +578,10 @@ def _get_worker_used_resources(
578578

579579
with log_catch(_logger, reraise=False):
580580
# NOTE: this runs directly on the dask-scheduler and may rise exceptions
581-
used_resources_per_worker: dict[
582-
str, dict[str, Any]
583-
] = await dask_utils.wrap_client_async_routine(
584-
self.backend.client.run_on_scheduler(_get_worker_used_resources)
581+
used_resources_per_worker: dict[str, dict[str, Any]] = (
582+
await dask_utils.wrap_client_async_routine(
583+
self.backend.client.run_on_scheduler(_get_worker_used_resources)
584+
)
585585
)
586586

587587
# let's update the scheduler info, with default to 0s since sometimes

services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
import json
21
from typing import Any
32

3+
from common_library.json_serialization import json_dumps
44
from fastapi import FastAPI, status
55
from httpx import Response, Timeout
66
from models_library.services_creation import CreateServiceMetricsAdditionalParams
@@ -123,7 +123,7 @@ async def post_containers_ports_outputs_dirs(
123123
async def get_containers_name(
124124
self, dynamic_sidecar_endpoint: AnyHttpUrl, *, dynamic_sidecar_network_name: str
125125
) -> Response:
126-
filters = json.dumps(
126+
filters = json_dumps(
127127
{
128128
"network": dynamic_sidecar_network_name,
129129
"exclude": SUFFIX_EGRESS_PROXY_NAME,

0 commit comments

Comments (0)