From 531b43e05b1b3f483b4a0d55e539b356a1c2b613 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 14:45:16 +0200 Subject: [PATCH 01/19] =?UTF-8?q?=E2=9C=A8=20Refactor:=20Replace=20`json.l?= =?UTF-8?q?oads`=20with=20`json=5Floads`=20for=20consistency=20across=20mo?= =?UTF-8?q?dules?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/dask_task_models_library/container_tasks/io.py | 3 ++- .../models_library/function_services_catalog/_settings.py | 3 ++- .../src/models_library/utils/labels_annotations.py | 7 +++---- .../src/service_integration/cli/_config.py | 4 ++-- .../pytest_plugin/docker_integration.py | 3 ++- 5 files changed, 11 insertions(+), 9 deletions(-) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py index b9ed0e9f6de3..dc87c52b1210 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py @@ -3,6 +3,7 @@ from pathlib import Path from typing import Annotated, Any, TypeAlias +from common_library.json_serialization import json_loads from models_library.basic_regex import MIME_TYPE_RE from models_library.generics import DictModel from models_library.services_types import ServicePortKey @@ -160,7 +161,7 @@ def from_task_output( with suppress(json.JSONDecodeError): # NOTE: The suppression here is ok, since if the data is empty, # there will be a validation error anyway - data = json.loads(output_data_file.read_text()) + data = json_loads(output_data_file.read_text()) for output_key, output_params in schema.items(): if isinstance(output_params, FilePortSchema): diff --git a/packages/models-library/src/models_library/function_services_catalog/_settings.py 
b/packages/models-library/src/models_library/function_services_catalog/_settings.py index 05812b81879a..b55fc09b9d20 100644 --- a/packages/models-library/src/models_library/function_services_catalog/_settings.py +++ b/packages/models-library/src/models_library/function_services_catalog/_settings.py @@ -1,11 +1,12 @@ import json import os +from common_library.json_serialization import json_loads from pydantic_settings import BaseSettings # Expects env var: FUNCTION_SERVICES_AUTHORS='{"OM":{"name": ...}, "EN":{...} }' try: - AUTHORS = json.loads(os.environ.get("FUNCTION_SERVICES_AUTHORS", "{}")) + AUTHORS = json_loads(os.environ.get("FUNCTION_SERVICES_AUTHORS", "{}")) except json.decoder.JSONDecodeError: AUTHORS = {} diff --git a/packages/models-library/src/models_library/utils/labels_annotations.py b/packages/models-library/src/models_library/utils/labels_annotations.py index 4bb93347c63d..339d7d1b52ce 100644 --- a/packages/models-library/src/models_library/utils/labels_annotations.py +++ b/packages/models-library/src/models_library/utils/labels_annotations.py @@ -1,14 +1,13 @@ -""" Image labels annotations +"""Image labels annotations osparc expects the service configuration (in short: config) attached to the service's image as label annotations. 
This module defines how this config is serialized/deserialized to/from docker labels annotations """ -import json from json.decoder import JSONDecodeError from typing import Any, TypeAlias -from common_library.json_serialization import json_dumps +from common_library.json_serialization import json_dumps, json_loads LabelsAnnotationsDict: TypeAlias = dict[str, str | float | bool | None] @@ -57,7 +56,7 @@ def from_labels( for key, label in labels.items(): if key.startswith(f"{prefix_key}."): try: - value = json.loads(label) # type: ignore + value = json_loads(label) # type: ignore except JSONDecodeError: value = label diff --git a/packages/service-integration/src/service_integration/cli/_config.py b/packages/service-integration/src/service_integration/cli/_config.py index 7ae9c65f6894..b342d002cbf4 100644 --- a/packages/service-integration/src/service_integration/cli/_config.py +++ b/packages/service-integration/src/service_integration/cli/_config.py @@ -1,10 +1,10 @@ -import json from pathlib import Path from typing import Annotated, Final import rich import typer import yaml +from common_library.json_serialization import json_loads from models_library.utils.labels_annotations import LabelsAnnotationsDict from pydantic import BaseModel @@ -57,7 +57,7 @@ def _save(service_name: str, filename: Path, model: BaseModel): rich.print(f"Creating {output_path} ...", end="") with output_path.open("wt") as fh: - data = json.loads( + data = json_loads( model.model_dump_json(by_alias=True, exclude_none=True) ) yaml.safe_dump(data, fh, sort_keys=False) diff --git a/packages/service-integration/src/service_integration/pytest_plugin/docker_integration.py b/packages/service-integration/src/service_integration/pytest_plugin/docker_integration.py index 233869e7def0..ba2fd361f795 100644 --- a/packages/service-integration/src/service_integration/pytest_plugin/docker_integration.py +++ b/packages/service-integration/src/service_integration/pytest_plugin/docker_integration.py @@ -18,6 
+18,7 @@ import jsonschema import pytest import yaml +from common_library.json_serialization import json_loads from docker.errors import APIError from docker.models.containers import Container @@ -206,7 +207,7 @@ def convert_to_simcore_labels(image_labels: dict) -> dict: io_simcore_labels = {} for key, value in image_labels.items(): if str(key).startswith("io.simcore."): - simcore_label = json.loads(value) + simcore_label = json_loads(value) simcore_keys = list(simcore_label.keys()) assert len(simcore_keys) == 1 simcore_key = simcore_keys[0] From 5065837ffb77ce198475d59345fb583a65138f6c Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 14:46:34 +0200 Subject: [PATCH 02/19] =?UTF-8?q?=E2=9C=A8=20Refactor:=20Replace=20`json.l?= =?UTF-8?q?oads`=20with=20`json=5Floads`=20for=20consistency=20across=20mo?= =?UTF-8?q?dules?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/servicelib/aiohttp/rest_middlewares.py | 5 ++--- packages/service-library/src/servicelib/rest_responses.py | 4 +++- .../src/simcore_sdk/node_ports_common/dbmanager.py | 4 +++- .../src/simcore_sdk/node_ports_common/file_io_utils.py | 7 +++---- .../src/simcore_sdk/node_ports_v2/serialization_v2.py | 5 ++--- 5 files changed, 13 insertions(+), 12 deletions(-) diff --git a/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py b/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py index d40abae5669d..8fdf88544982 100644 --- a/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py +++ b/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py @@ -3,7 +3,6 @@ SEE https://gist.github.com/amitripshtos/854da3f4217e3441e8fceea85b0cbd91 """ -import json import logging from collections.abc import Awaitable, Callable from typing import Any, Union @@ -12,7 +11,7 @@ from aiohttp.web_request import Request from aiohttp.web_response import 
StreamResponse from common_library.error_codes import create_error_code -from common_library.json_serialization import json_dumps +from common_library.json_serialization import json_dumps, json_loads from models_library.rest_error import ErrorGet, ErrorItemType, LogMessageType from ..logging_errors import create_troubleshotting_log_kwargs @@ -107,7 +106,7 @@ async def _middleware_handler(request: web.Request, handler: Handler): err.content_type = MIMETYPE_APPLICATION_JSON if err.text: try: - payload = json.loads(err.text) + payload = json_loads(err.text) if not is_enveloped_from_map(payload): payload = wrap_as_envelope(data=payload) err.text = json_dumps(payload) diff --git a/packages/service-library/src/servicelib/rest_responses.py b/packages/service-library/src/servicelib/rest_responses.py index daa374462785..9dc32ed9e5a7 100644 --- a/packages/service-library/src/servicelib/rest_responses.py +++ b/packages/service-library/src/servicelib/rest_responses.py @@ -2,6 +2,8 @@ from collections.abc import Mapping from typing import Any +from common_library.json_serialization import json_loads + _ENVELOPE_KEYS = ("data", "error") @@ -11,7 +13,7 @@ def is_enveloped_from_map(payload: Mapping) -> bool: def is_enveloped_from_text(text: str) -> bool: try: - payload = json.loads(text) + payload = json_loads(text) except json.decoder.JSONDecodeError: return False return is_enveloped_from_map(payload) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py index 73321e091278..260fa84c71cc 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py @@ -2,6 +2,8 @@ import logging import sqlalchemy as sa +from common_library.json_serialization import json_loads + from models_library.projects import ProjectID from models_library.users import UserID from pydantic import TypeAdapter @@ -85,7 +87,7 
@@ async def write_ports_configuration( ) _logger.debug(message) - node_configuration = json.loads(json_configuration) + node_configuration = json_loads(json_configuration) async with ( DBContextManager(self._db_engine) as engine, engine.begin() as connection, diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py index 7f6801043cd2..51aa3bae3c1c 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py @@ -1,5 +1,4 @@ import asyncio -import json import logging from collections.abc import AsyncGenerator, Coroutine from contextlib import AsyncExitStack @@ -17,6 +16,7 @@ ClientSession, RequestInfo, ) +from common_library.json_serialization import json_loads from models_library.api_schemas_storage.storage_schemas import ( ETag, FileUploadSchema, @@ -143,8 +143,7 @@ class ProgressData: @runtime_checkable class LogRedirectCB(Protocol): - async def __call__(self, log: str) -> None: - ... + async def __call__(self, log: str) -> None: ... 
async def _file_chunk_writer( @@ -276,7 +275,7 @@ async def _session_put( assert response.status == status.HTTP_200_OK # nosec assert response.headers # nosec assert "Etag" in response.headers # nosec - etag: str = json.loads(response.headers["Etag"]) + etag: str = json_loads(response.headers["Etag"]) return etag diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py index 510f7b14fbb4..7c60de81caf9 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py @@ -1,11 +1,10 @@ import functools -import json import logging from pprint import pformat from typing import Any import pydantic -from common_library.json_serialization import json_dumps +from common_library.json_serialization import json_dumps, json_loads from models_library.projects_nodes_io import NodeID from models_library.utils.nodes import compute_node_hash from packaging import version @@ -50,7 +49,7 @@ async def load( port_config_str: str = await db_manager.get_ports_configuration_from_node_uuid( project_id, node_uuid ) - port_cfg = json.loads(port_config_str) + port_cfg = json_loads(port_config_str) log.debug(f"{port_cfg=}") # pylint: disable=logging-fstring-interpolation if any(k not in port_cfg for k in NODE_REQUIRED_KEYS): From 42556c1ff1c161bc18c4ef9ee41efbb573b8af09 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 14:46:58 +0200 Subject: [PATCH 03/19] =?UTF-8?q?=E2=9C=A8=20Refactor:=20Replace=20`json.l?= =?UTF-8?q?oads`=20with=20`json=5Floads`=20for=20consistency=20across=20di?= =?UTF-8?q?rector=20and=20registry=5Fproxy=20modules?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../clients/director.py | 5 +-- .../src/simcore_service_director/producer.py | 41 +++++++++---------- 
.../registry_proxy.py | 7 ++-- 3 files changed, 26 insertions(+), 27 deletions(-) diff --git a/services/catalog/src/simcore_service_catalog/clients/director.py b/services/catalog/src/simcore_service_catalog/clients/director.py index 5c3768a58c86..4d9fe37a3dfe 100644 --- a/services/catalog/src/simcore_service_catalog/clients/director.py +++ b/services/catalog/src/simcore_service_catalog/clients/director.py @@ -1,6 +1,5 @@ import asyncio import functools -import json import logging import urllib.parse from collections.abc import AsyncIterator, Awaitable, Callable @@ -9,7 +8,7 @@ from typing import Any, Final import httpx -from common_library.json_serialization import json_dumps +from common_library.json_serialization import json_dumps, json_loads from fastapi import FastAPI, HTTPException from fastapi_lifespan_manager import State from models_library.api_schemas_directorv2.services import ServiceExtras @@ -221,7 +220,7 @@ async def get_service_extras( _logger.debug("Compiling service extras from labels %s", pformat(labels)) if _SERVICE_RUNTIME_SETTINGS in labels: - service_settings: list[dict[str, Any]] = json.loads( + service_settings: list[dict[str, Any]] = json_loads( labels[_SERVICE_RUNTIME_SETTINGS] ) for entry in service_settings: diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 78d218d34768..c57b8e190c48 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -13,6 +13,7 @@ import arrow import httpx import tenacity +from common_library.json_serialization import json_loads from fastapi import FastAPI, status from packaging.version import Version from servicelib.async_utils import run_sequentially_in_context @@ -135,7 +136,7 @@ async def _read_service_settings( ) -> dict[str, Any] | list[Any] | None: image_labels, _ = await registry_proxy.get_image_labels(app, key, tag) settings: dict[str, 
Any] | list[Any] | None = ( - json.loads(image_labels[settings_name]) + json_loads(image_labels[settings_name]) if settings_name in image_labels else None ) @@ -306,9 +307,9 @@ async def _create_docker_service_params( ] += f", {service_name}_stripprefixregex" placement_constraints_to_substitute: list[str] = [] - placement_substitutions: dict[ - str, str - ] = app_settings.DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS + placement_substitutions: dict[str, str] = ( + app_settings.DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS + ) assert isinstance(service_parameters_labels, list) # nosec for param in service_parameters_labels: _check_setting_correctness(param) @@ -316,18 +317,18 @@ async def _create_docker_service_params( if str(param["value"]).find("%service_uuid%") != -1: dummy_string = json.dumps(param["value"]) dummy_string = dummy_string.replace("%service_uuid%", node_uuid) - param["value"] = json.loads(dummy_string) + param["value"] = json_loads(dummy_string) if param["type"] == "Resources": # python-API compatible for backward compatibility if "mem_limit" in param["value"]: - docker_params["task_template"]["Resources"]["Limits"][ - "MemoryBytes" - ] = param["value"]["mem_limit"] + docker_params["task_template"]["Resources"]["Limits"]["MemoryBytes"] = ( + param["value"]["mem_limit"] + ) if "cpu_limit" in param["value"]: - docker_params["task_template"]["Resources"]["Limits"][ - "NanoCPUs" - ] = param["value"]["cpu_limit"] + docker_params["task_template"]["Resources"]["Limits"]["NanoCPUs"] = ( + param["value"]["cpu_limit"] + ) if "mem_reservation" in param["value"]: docker_params["task_template"]["Resources"]["Reservations"][ "MemoryBytes" @@ -379,11 +380,11 @@ async def _create_docker_service_params( # publishing port on the ingress network. 
elif param["name"] == "ports" and param["type"] == "int": # backward comp - docker_params["labels"][ - _to_simcore_runtime_docker_label_key("port") - ] = docker_params["labels"][ - f"traefik.http.services.{service_name}.loadbalancer.server.port" - ] = str( + docker_params["labels"][_to_simcore_runtime_docker_label_key("port")] = ( + docker_params["labels"][ + f"traefik.http.services.{service_name}.loadbalancer.server.port" + ] + ) = str( param["value"] ) # REST-API compatible @@ -445,11 +446,9 @@ async def _create_docker_service_params( ] = container_spec["Labels"][ _to_simcore_runtime_docker_label_key("cpu_limit") ] = f"{float(nano_cpus_limit) / 1e9}" - docker_params["labels"][ - _to_simcore_runtime_docker_label_key("memory_limit") - ] = container_spec["Labels"][ - _to_simcore_runtime_docker_label_key("memory_limit") - ] = mem_limit + docker_params["labels"][_to_simcore_runtime_docker_label_key("memory_limit")] = ( + container_spec["Labels"][_to_simcore_runtime_docker_label_key("memory_limit")] + ) = mem_limit # and make the container aware of them via env variables resource_limits = { diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index f90373bb2f18..56b5d812f8c7 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -8,6 +8,7 @@ import httpx from aiocache import Cache, SimpleMemoryCache # type: ignore[import-untyped] +from common_library.json_serialization import json_loads from fastapi import FastAPI, status from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task @@ -380,7 +381,7 @@ async def get_image_labels( request_result, headers = await registry_request( app, path=path, method="GET", use_cache=not update_cache ) - v1_compatibility_key = json.loads(request_result["history"][0]["v1Compatibility"]) + 
v1_compatibility_key = json_loads(request_result["history"][0]["v1Compatibility"]) container_config: dict[str, Any] = v1_compatibility_key.get( "container_config", v1_compatibility_key["config"] ) @@ -413,7 +414,7 @@ async def get_image_details( if not key.startswith("io.simcore."): continue try: - label_data = json.loads(labels[key]) + label_data = json_loads(labels[key]) for label_key in label_data: image_details[label_key] = label_data[label_key] except json.decoder.JSONDecodeError: @@ -483,7 +484,7 @@ async def list_interactive_service_dependencies( dependency_keys = [] if DEPENDENCIES_LABEL_KEY in image_labels: try: - dependencies = json.loads(image_labels[DEPENDENCIES_LABEL_KEY]) + dependencies = json_loads(image_labels[DEPENDENCIES_LABEL_KEY]) dependency_keys = [ {"key": dependency["key"], "tag": dependency["tag"]} for dependency in dependencies From 51dce02050903c12a497e3f90b723c0ed6108d19 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 14:48:34 +0200 Subject: [PATCH 04/19] replaces json.loads --- .../db/repositories/projects_networks.py | 5 ++- .../docker_service_specs/settings.py | 31 +++++++++++-------- .../scheduler/_core/_events_utils.py | 4 +-- .../api/frontend/routes/_index.py | 3 +- .../api/frontend/routes/_service.py | 3 +- .../api/rest/containers.py | 4 +-- .../core/validation.py | 4 +-- .../modules/nodeports.py | 5 +-- .../utils/simcore_s3_dsm_utils.py | 3 +- .../_db_comp_tasks_listening_task.py | 4 +-- .../projects/_projects_service.py | 5 ++- .../studies_dispatcher/_projects.py | 4 +-- .../users/_notifications_rest.py | 4 +-- 13 files changed, 43 insertions(+), 36 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py index 12fc7fe29322..f8b0b1d3a533 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py @@ -1,7 +1,6 @@ -import json - import sqlalchemy as sa from aiopg.sa.result import RowProxy +from common_library.json_serialization import json_loads from models_library.projects import ProjectID from models_library.projects_networks import NetworksWithAliases, ProjectsNetworks from sqlalchemy.dialects.postgresql import insert as pg_insert @@ -33,7 +32,7 @@ async def upsert_projects_networks( ) async with self.db_engine.acquire() as conn: - row_data = json.loads(projects_networks_to_insert.model_dump_json()) + row_data = json_loads(projects_networks_to_insert.model_dump_json()) insert_stmt = pg_insert(projects_networks).values(**row_data) upsert_snapshot = insert_stmt.on_conflict_do_update( constraint=projects_networks.primary_key, set_=row_data diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py index dcefb810c2cf..6f4a413ea76e 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py @@ -3,6 +3,7 @@ from collections import deque from typing import Any, cast +from common_library.json_serialization import json_loads from models_library.basic_types import EnvVarKey, PortInt from models_library.boot_options import BootOption from models_library.docker import ( @@ -156,11 +157,15 @@ def update_service_params_from_settings( container_spec = create_service_params["task_template"]["ContainerSpec"] # set labels for CPU and Memory limits, for both service and container labels # NOTE: cpu-limit is a float not 
NanoCPUs!! - container_spec["Labels"][ - f"{to_simcore_runtime_docker_label_key('cpu-limit')}" - ] = str( - float(create_service_params["task_template"]["Resources"]["Limits"]["NanoCPUs"]) - / (1 * 10**9) + container_spec["Labels"][f"{to_simcore_runtime_docker_label_key('cpu-limit')}"] = ( + str( + float( + create_service_params["task_template"]["Resources"]["Limits"][ + "NanoCPUs" + ] + ) + / (1 * 10**9) + ) ) create_service_params["labels"][ f"{to_simcore_runtime_docker_label_key('cpu-limit')}" @@ -401,7 +406,7 @@ def _get_boot_options( if boot_options_encoded is None: return None - boot_options = json.loads(boot_options_encoded)["boot-options"] + boot_options = json_loads(boot_options_encoded)["boot-options"] log.debug("got boot_options=%s", boot_options) return {k: BootOption.model_validate(v) for k, v in boot_options.items()} @@ -443,13 +448,13 @@ async def get_labels_for_involved_services( # paths_mapping express how to map dynamic-sidecar paths to the compose-spec volumes # where the service expects to find its certain folders - labels_for_involved_services: dict[ - str, SimcoreServiceLabels - ] = await _extract_osparc_involved_service_labels( - catalog_client=catalog_client, - service_key=service_key, - service_tag=service_tag, - service_labels=simcore_service_labels, + labels_for_involved_services: dict[str, SimcoreServiceLabels] = ( + await _extract_osparc_involved_service_labels( + catalog_client=catalog_client, + service_key=service_key, + service_tag=service_tag, + service_labels=simcore_service_labels, + ) ) logging.info("labels_for_involved_services=%s", labels_for_involved_services) return labels_for_involved_services diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py index 5703459bf9c4..93a3b1d69237 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py @@ -1,10 +1,10 @@ # pylint: disable=relative-beyond-top-level import asyncio -import json import logging from typing import TYPE_CHECKING, Any, cast +from common_library.json_serialization import json_loads from fastapi import FastAPI from models_library.api_schemas_long_running_tasks.base import ProgressPercent from models_library.products import ProductName @@ -565,7 +565,7 @@ async def _restore_service_state_with_metrics() -> None: scheduler_data.key, scheduler_data.version ) ) - service_outputs_labels = json.loads( + service_outputs_labels = json_loads( simcore_service_labels.model_dump().get("io.simcore.outputs", "{}") ).get("outputs", {}) _logger.debug( diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py index 1163328bfe72..0d201ac83ea5 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py @@ -1,6 +1,7 @@ import json import httpx +from common_library.json_serialization import json_loads from fastapi import FastAPI from models_library.projects_nodes_io import NodeID from nicegui import APIRouter, app, ui @@ -39,7 +40,7 @@ def _render_service_details(node_id: NodeID, service: TrackedServiceModel) -> No service.dynamic_service_start.product_name, ) service_status = ( - json.loads(service.service_status) if service.service_status else {} + json_loads(service.service_status) if service.service_status else {} ) dict_to_render["Service State"] = ( "label", diff --git 
a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py index 468de4aedb96..3896f079cbd7 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py @@ -1,6 +1,7 @@ import json import httpx +from common_library.json_serialization import json_loads from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStop, ) @@ -89,7 +90,7 @@ async def service_details(node_id: NodeID): scheduler_internals = service_model.model_dump(mode="json") service_status = scheduler_internals.pop("service_status", "{}") - service_status = json.loads("{}" if service_status == "" else service_status) + service_status = json_loads("{}" if service_status == "" else service_status) dynamic_service_start = scheduler_internals.pop("dynamic_service_start") ui.markdown("**Service Status**") diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py index 6ada9de83de4..bf3790606665 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py @@ -1,10 +1,10 @@ # pylint: disable=too-many-arguments -import json import logging from asyncio import Lock from typing import Annotated, Any, Final +from common_library.json_serialization import json_loads from fastapi import APIRouter, Depends, HTTPException from fastapi import Path as PathParam from fastapi import Query, Request, status @@ -234,7 +234,7 @@ async def get_containers_name( """ _ = request - filters_dict: dict[str, str] = json.loads(filters) + filters_dict: dict[str, str] = 
json_loads(filters) if not isinstance(filters_dict, dict): raise HTTPException( status.HTTP_422_UNPROCESSABLE_ENTITY, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/validation.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/validation.py index 2737ccdc7da0..2c3a149c6ae8 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/validation.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/validation.py @@ -1,4 +1,3 @@ -import json import logging import os import re @@ -6,6 +5,7 @@ from typing import Any, NamedTuple import yaml +from common_library.json_serialization import json_loads from servicelib.docker_constants import ( DEFAULT_USER_SERVICES_NETWORK_NAME, SUFFIX_EGRESS_PROXY_NAME, @@ -53,7 +53,7 @@ def _get_forwarded_env_vars(container_key: str) -> list[str]: new_entry_key = key.replace("FORWARD_ENV_", "") # parsing `VAR={"destination_containers": ["destination_container"], "env_var": "PAYLOAD"}` - new_entry_payload = json.loads(os.environ[key]) + new_entry_payload = json_loads(os.environ[key]) if container_key not in new_entry_payload["destination_containers"]: continue diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py index 0657ffe237ec..db074ac70714 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py @@ -14,6 +14,7 @@ import magic from aiofiles.os import remove from aiofiles.tempfile import TemporaryDirectory as AioTemporaryDirectory +from common_library.json_serialization import json_loads from models_library.projects import ProjectIDStr from models_library.projects_nodes_io import NodeIDStr from models_library.services_types import ServicePortKey @@ -208,7 +209,7 @@ async def _archive_dir_notified( 
else: data_file = outputs_path / _KEY_VALUE_FILE_NAME if data_file.exists(): - data = json.loads(data_file.read_text()) + data = json_loads(data_file.read_text()) if port.key in data and data[port.key] is not None: ports_values[port.key] = (data[port.key], None) else: @@ -390,7 +391,7 @@ async def _get_date_from_port_notified( if data: data_file = target_dir / _KEY_VALUE_FILE_NAME if data_file.exists(): - current_data = json.loads(data_file.read_text()) + current_data = json_loads(data_file.read_text()) # merge data data = {**current_data, **data} data_file.write_text(json.dumps(data)) diff --git a/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py b/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py index d9e36c9f8567..6386e4e8ea16 100644 --- a/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py +++ b/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py @@ -7,6 +7,7 @@ from aws_library.s3 import S3MetaData, SimcoreS3API from aws_library.s3._constants import STREAM_READER_CHUNK_SIZE from aws_library.s3._models import S3ObjectKey +from common_library.json_serialization import json_loads from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.projects import ProjectID, ProjectIDStr from models_library.projects_nodes_io import ( @@ -250,7 +251,7 @@ async def list_child_paths_from_s3( """ objects_cursor = None if cursor is not None: - cursor_params = orjson.loads(cursor) + cursor_params = json_loads(cursor) assert cursor_params["file_filter"] == f"{file_filter}" # nosec objects_cursor = cursor_params["objects_next_cursor"] list_s3_objects, objects_next_cursor = await s3_client.list_objects( diff --git a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py index f6f3853eaf7a..c1bdb6790a44 100644 --- 
a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py +++ b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py @@ -4,7 +4,6 @@ """ import asyncio -import json import logging from collections.abc import AsyncIterator from contextlib import suppress @@ -14,6 +13,7 @@ from aiohttp import web from aiopg.sa import Engine from aiopg.sa.connection import SAConnection +from common_library.json_serialization import json_loads from models_library.errors import ErrorDict from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -152,7 +152,7 @@ async def _listen(app: web.Application, db_engine: Engine) -> NoReturn: continue notification = conn.connection.notifies.get_nowait() # get the data and the info on what changed - payload = _CompTaskNotificationPayload(**json.loads(notification.payload)) + payload = _CompTaskNotificationPayload(**json_loads(notification.payload)) _logger.debug("received update from database: %s", f"{payload=}") await _handle_db_notification(app, payload, conn) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index f079b8fb5f67..ca3e03978a4f 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -10,7 +10,6 @@ import asyncio import collections import datetime -import json import logging from collections import defaultdict from collections.abc import Generator @@ -21,7 +20,7 @@ from uuid import UUID, uuid4 from aiohttp import web -from common_library.json_serialization import json_dumps +from common_library.json_serialization import json_dumps, json_loads from models_library.api_schemas_clusters_keeper.ec2_instances import EC2InstanceTypeGet from 
models_library.api_schemas_directorv2.dynamic_services import ( GetProjectInactivityResponse, @@ -1598,7 +1597,7 @@ async def add_project_states_for_user( prj_node = project["workbench"].get(str(node_id)) if prj_node is None: continue - node_state_dict = json.loads( + node_state_dict = json_loads( node_state.model_dump_json(by_alias=True, exclude_unset=True) ) prj_node.setdefault("state", {}).update(node_state_dict) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py index a4adbf8e5763..a0509275a125 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py @@ -6,12 +6,12 @@ """ -import json import logging from pathlib import Path from typing import NamedTuple from aiohttp import web +from common_library.json_serialization import json_loads from models_library.api_schemas_webserver.projects_ui import StudyUI from models_library.projects import DateTimeStr, Project, ProjectID from models_library.projects_access import AccessRights, GroupIDStr @@ -194,7 +194,7 @@ async def _add_new_project( db: ProjectDBAPI = app[APP_PROJECT_DBAPI] # validated project is transform in dict via json to use only primitive types - project_in: dict = json.loads( + project_in: dict = json_loads( project.model_dump_json(exclude_none=True, by_alias=True) ) diff --git a/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py b/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py index 65c427bf7b0b..8f4f920168e5 100644 --- a/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py +++ b/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py @@ -1,8 +1,8 @@ -import json import logging import redis.asyncio as aioredis from aiohttp import web +from 
common_library.json_serialization import json_loads from models_library.api_schemas_webserver.users import MyPermissionGet from models_library.users import UserPermission from pydantic import BaseModel @@ -45,7 +45,7 @@ async def _get_user_notifications( get_notification_key(user_id), -1 * MAX_NOTIFICATIONS_FOR_USER_TO_SHOW, -1 ) ) - notifications = [json.loads(x) for x in raw_notifications] + notifications = [json_loads(x) for x in raw_notifications] # Make it backwards compatible for n in notifications: if "product" not in n: From 88bf830715da04b27c598a15c95ab222bf42f399 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 14:51:04 +0200 Subject: [PATCH 05/19] drop unused --- .../src/models_library/utils/change_case.py | 5 +- .../src/pytest_simcore/helpers/s3.py | 4 +- .../src/simcore_service_webserver/utils.py | 23 +------ .../server/tests/unit/isolated/test_utils.py | 60 ------------------- 4 files changed, 6 insertions(+), 86 deletions(-) diff --git a/packages/models-library/src/models_library/utils/change_case.py b/packages/models-library/src/models_library/utils/change_case.py index 098f05ab7fd4..89495a12e8b1 100644 --- a/packages/models-library/src/models_library/utils/change_case.py +++ b/packages/models-library/src/models_library/utils/change_case.py @@ -1,4 +1,4 @@ -""" String convesion +"""String conversion Example of usage in pydantic: @@ -7,10 +7,11 @@ class Config: extra = Extra.forbid alias_generator = snake_to_camel # <-------- - json_loads = orjson.loads + json_loads = json_loads json_dumps = json_dumps """ + # Partially taken from https://github.com/autoferrit/python-change-case/blob/master/change_case/change_case.py#L131 import re from typing import Final diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py index 61d630d994c3..fd16f8683cb9 100644 --- 
a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py @@ -5,8 +5,8 @@ import aiofiles import httpx -import orjson from aws_library.s3 import MultiPartUploadLinks +from common_library.json_serialization import json_loads from fastapi import status from models_library.api_schemas_storage.storage_schemas import ( ETag, @@ -71,7 +71,7 @@ async def upload_file_part( assert response.status_code == status.HTTP_200_OK assert response.headers assert "Etag" in response.headers - received_e_tag = orjson.loads(response.headers["Etag"]) + received_e_tag = json_loads(response.headers["Etag"]) print( f"--> completed upload {this_file_chunk_size=} of {file=}, [{part_index + 1}/{num_parts}], {received_e_tag=}" ) diff --git a/services/web/server/src/simcore_service_webserver/utils.py b/services/web/server/src/simcore_service_webserver/utils.py index d7b50e7fb5e4..4a6c9d0169e6 100644 --- a/services/web/server/src/simcore_service_webserver/utils.py +++ b/services/web/server/src/simcore_service_webserver/utils.py @@ -1,5 +1,5 @@ """ - General utilities and helper functions +General utilities and helper functions """ import asyncio @@ -11,11 +11,8 @@ import tracemalloc from datetime import datetime from pathlib import Path -from typing import Any -import orjson from common_library.error_codes import ErrorCodeStr -from models_library.basic_types import SHA1Str from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict TypedDict, ) @@ -176,21 +173,3 @@ def compose_support_error_msg( def get_traceback_string(exception: BaseException) -> str: return "".join(traceback.format_exception(exception)) - - -# ----------------------------------------------- -# -# SERIALIZATION, CHECKSUMS, -# - - -def compute_sha1_on_small_dataset(d: Any) -> SHA1Str: - """ - This should be used for small datasets, otherwise it should be chuncked - and aggregated - - More details in 
test_utils.py:test_compute_sha1_on_small_dataset - """ - # SEE options in https://github.com/ijl/orjson#option - data_bytes = orjson.dumps(d, option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SORT_KEYS) - return SHA1Str(hashlib.sha1(data_bytes).hexdigest()) # nosec # NOSONAR diff --git a/services/web/server/tests/unit/isolated/test_utils.py b/services/web/server/tests/unit/isolated/test_utils.py index 1bcb69532694..d4017a79d8c3 100644 --- a/services/web/server/tests/unit/isolated/test_utils.py +++ b/services/web/server/tests/unit/isolated/test_utils.py @@ -1,16 +1,10 @@ -import asyncio -import concurrent.futures import time -import timeit import urllib.parse -from contextlib import contextmanager from datetime import datetime -import pytest from simcore_service_webserver.utils import ( DATETIME_FORMAT, compose_support_error_msg, - compute_sha1_on_small_dataset, now_str, to_datetime, ) @@ -65,60 +59,6 @@ def test_time_utils(): assert now_time == datetime.strptime(snapshot, DATETIME_FORMAT) -@pytest.mark.skip(reason="DEV-demo") -async def test_compute_sha1_on_small_dataset(fake_project: dict): - # Based on GitHK review https://github.com/ITISFoundation/osparc-simcore/pull/2556: - # From what I know, these having function tend to be a bit CPU intensive, based on the size of the dataset. - # Could we maybe have an async version of this function here, run it on an executor? 
- # - # PC: Here we check the overhead of sha when adding a pool executor - - @contextmanager - def timeit_ctx(what): - start = timeit.default_timer() - yield - stop = timeit.default_timer() - print(f"Time for {what}:", f"{stop - start} secs") - - # dataset is N copies of a project dataset (typical dataset 'unit' in this module) - N = 10_000 - data = [ - fake_project, - ] * N - - print("-" * 100) - with timeit_ctx("compute_sha1 sync"): - project_sha2_sync = compute_sha1_on_small_dataset(data) - - with timeit_ctx("compute_sha1 async"): - loop = asyncio.get_running_loop() - with concurrent.futures.ProcessPoolExecutor() as pool: - project_sha2_async = await loop.run_in_executor( - pool, compute_sha1_on_small_dataset, data - ) - - assert project_sha2_sync == project_sha2_async - - # N=1 - # Time for compute_sha1_sync: 3.153807483613491e-05 secs - # Time for compute_sha1_async: 0.03046882478520274 secs - - # N=100 - # Time for compute_sha1 sync: 0.0005367340054363012 secs - # Time for compute_sha1 async: 0.029975621961057186 secs - - # N=1000 - # Time for compute_sha1 sync: 0.005468853982165456 secs - # Time for compute_sha1 async: 0.04451707797124982 secs - - # N=10000 - # Time for compute_sha1 sync: 0.05151305114850402 secs - # Time for compute_sha1 async: 0.09799357503652573 secs - - # For larger datasets, async solution definitvely scales better - # but for smaller ones, the overhead is considerable - - def test_compose_support_error_msg(): msg = compose_support_error_msg( From e5024e99126ced340b75d47dd473d87924f44318 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:02:17 +0200 Subject: [PATCH 06/19] =?UTF-8?q?=E2=9C=A8=20Refactor:=20Replace=20`json.d?= =?UTF-8?q?umps`=20with=20`json=5Fdumps`=20for=20consistency=20across=20mo?= =?UTF-8?q?dules?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../models-library/src/models_library/utils/nodes.py | 4 
++-- .../src/simcore_postgres_database/models/products.py | 4 ++-- .../src/simcore_sdk/node_ports_common/dbmanager.py | 5 ++--- .../src/simcore_service_autoscaling/utils/redis.py | 7 +++---- .../src/simcore_service_autoscaling/utils/utils_ec2.py | 10 ++++------ .../simcore_service_clusters_keeper/utils/clusters.py | 4 ++-- .../computational_sidecar/core.py | 4 ++-- .../models/dynamic_services_scheduler.py | 6 +++--- .../simcore_service_director_v2/modules/dask_client.py | 6 +++--- .../modules/dynamic_sidecar/api_client/_thin.py | 4 ++-- .../modules/dynamic_sidecar/docker_api/_core.py | 7 +++---- .../dynamic_sidecar/docker_service_specs/settings.py | 5 ++--- .../director/src/simcore_service_director/producer.py | 5 ++--- .../api/frontend/routes/_index.py | 6 ++---- .../api/frontend/routes/_service.py | 10 ++++------ .../src/simcore_service_dynamic_sidecar/cli.py | 4 ++-- .../utils/simcore_s3_dsm_utils.py | 5 ++--- .../login/_preregistration_service.py | 2 +- 18 files changed, 43 insertions(+), 55 deletions(-) diff --git a/packages/models-library/src/models_library/utils/nodes.py b/packages/models-library/src/models_library/utils/nodes.py index dd791677d198..4c5465c96715 100644 --- a/packages/models-library/src/models_library/utils/nodes.py +++ b/packages/models-library/src/models_library/utils/nodes.py @@ -1,10 +1,10 @@ import hashlib -import json import logging from collections.abc import Callable, Coroutine from copy import deepcopy from typing import Any +from common_library.json_serialization import json_dumps from pydantic import BaseModel, TypeAdapter from ..projects import Project @@ -67,6 +67,6 @@ async def compute_node_hash( # now create the hash # WARNING: Here we cannot change to json_serialization.json_dumps because if would create a different dump string and therefore a different hash # NOTE that these hashes might have been already stored elsewhere - block_string = json.dumps(resolved_payload, sort_keys=True).encode("utf-8") + block_string = 
json_dumps(resolved_payload, sort_keys=True).encode("utf-8") raw_hash = hashlib.sha256(block_string) return raw_hash.hexdigest() diff --git a/packages/postgres-database/src/simcore_postgres_database/models/products.py b/packages/postgres-database/src/simcore_postgres_database/models/products.py index c7668a6953a6..a646bf7ef8f7 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/products.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/products.py @@ -5,10 +5,10 @@ - Every product has a front-end with exactly the same name """ -import json from typing import Literal import sqlalchemy as sa +from common_library.json_serialization import json_dumps from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.sql import func from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict @@ -114,7 +114,7 @@ class ProductLoginSettingsDict(TypedDict, total=False): # NOTE: defaults affects migration!! 
LOGIN_SETTINGS_DEFAULT = ProductLoginSettingsDict() # = {} -_LOGIN_SETTINGS_SERVER_DEFAULT = json.dumps(LOGIN_SETTINGS_DEFAULT) +_LOGIN_SETTINGS_SERVER_DEFAULT = json_dumps(LOGIN_SETTINGS_DEFAULT) # diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py index 260fa84c71cc..2fc41388f527 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py @@ -1,8 +1,7 @@ -import json import logging import sqlalchemy as sa -from common_library.json_serialization import json_loads +from common_library.json_serialization import json_dumps, json_loads from models_library.projects import ProjectID from models_library.users import UserID @@ -118,7 +117,7 @@ async def get_ports_configuration_from_node_uuid( engine.connect() as connection, ): node = await _get_node_from_db(project_id, node_uuid, connection) - node_json_config = json.dumps( + node_json_config = json_dumps( { "schema": node.schema, "inputs": node.inputs, diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/redis.py b/services/autoscaling/src/simcore_service_autoscaling/utils/redis.py index b5ad337c872e..3d4998db87d4 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/redis.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/redis.py @@ -1,5 +1,4 @@ -import json - +from common_library.json_serialization import json_dumps from fastapi import FastAPI from ..core.settings import ApplicationSettings @@ -14,7 +13,7 @@ def create_lock_key_and_value(app: FastAPI) -> tuple[str, str]: "dynamic", *app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS, ] - lock_value = json.dumps( + lock_value = json_dumps( { "node_labels": app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS } @@ -24,7 +23,7 @@ def create_lock_key_and_value(app: FastAPI) -> tuple[str, 
str]: "computational", f"{app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL}", ] - lock_value = json.dumps( + lock_value = json_dumps( {"scheduler_url": f"{app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL}"} ) lock_key = ":".join(f"{k}" for k in lock_key_parts) diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_ec2.py b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_ec2.py index 27aca7430eb5..b3b76a48717f 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_ec2.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_ec2.py @@ -1,14 +1,12 @@ -""" Free helper functions for AWS API +"""Free helper functions for AWS API""" -""" - -import json import logging from collections import OrderedDict from collections.abc import Callable from textwrap import dedent from aws_library.ec2 import AWSTagKey, AWSTagValue, EC2InstanceType, EC2Tags, Resources +from common_library.json_serialization import json_dumps from .._meta import VERSION from ..core.errors import ConfigurationError, TaskBestFittingInstanceNotFoundError @@ -23,12 +21,12 @@ def get_ec2_tags_dynamic(app_settings: ApplicationSettings) -> EC2Tags: return { AWSTagKey("io.simcore.autoscaling.version"): AWSTagValue(f"{VERSION}"), AWSTagKey("io.simcore.autoscaling.monitored_nodes_labels"): AWSTagValue( - json.dumps( + json_dumps( app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS ) ), AWSTagKey("io.simcore.autoscaling.monitored_services_labels"): AWSTagValue( - json.dumps( + json_dumps( app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_SERVICE_LABELS ) ), diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py index d2820ef2b882..cf1543e63925 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py +++ 
b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py @@ -1,7 +1,6 @@ import base64 import datetime import functools -import json from pathlib import Path from typing import Any, Final @@ -9,6 +8,7 @@ import yaml from aws_library.ec2 import EC2InstanceBootSpecific, EC2InstanceData, EC2Tags from aws_library.ec2._models import CommandStr +from common_library.json_serialization import json_dumps from common_library.serialization import model_dump_with_secrets from fastapi.encoders import jsonable_encoder from models_library.api_schemas_clusters_keeper.clusters import ( @@ -80,7 +80,7 @@ def _convert_to_env_list(entries: list[Any]) -> str: return f"[{entries_as_str}]" def _convert_to_env_dict(entries: dict[str, Any]) -> str: - return f"'{json.dumps(jsonable_encoder(entries))}'" + return f"'{json_dumps(jsonable_encoder(entries))}'" assert app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES # nosec diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py index 126485b26458..7b753e306207 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py @@ -1,5 +1,4 @@ import asyncio -import json import logging import os import socket @@ -12,6 +11,7 @@ from uuid import uuid4 from aiodocker import Docker +from common_library.json_serialization import json_dumps from dask_task_models_library.container_tasks.docker import DockerBasicAuth from dask_task_models_library.container_tasks.errors import ServiceRuntimeError from dask_task_models_library.container_tasks.io import FileUrl, TaskOutputData @@ -95,7 +95,7 @@ async def _write_input_data( # NOTE: temporary solution until new version is created for task in download_tasks: await task - input_data_file.write_text(json.dumps(local_input_data_file)) + 
input_data_file.write_text(json_dumps(local_input_data_file)) await self._publish_sidecar_log("All the input data were downloaded.") diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index 5fbe75915c67..39dfda3bc98a 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -1,4 +1,3 @@ -import json import logging from collections.abc import Mapping from datetime import datetime @@ -10,6 +9,7 @@ import arrow from common_library.error_codes import ErrorCodeStr +from common_library.json_serialization import json_dumps from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceCreate from models_library.api_schemas_directorv2.dynamic_services_service import ( CommonServiceDetails, @@ -504,7 +504,7 @@ def from_http_request( "product_name": service.product_name, "paths_mapping": simcore_service_labels.paths_mapping, "callbacks_mapping": simcore_service_labels.callbacks_mapping, - "compose_spec": json.dumps(simcore_service_labels.compose_spec), + "compose_spec": json_dumps(simcore_service_labels.compose_spec), "container_http_entry": simcore_service_labels.container_http_entry, "restart_policy": simcore_service_labels.restart_policy, "dynamic_sidecar_network_name": names_helper.dynamic_sidecar_network_name, @@ -541,7 +541,7 @@ def as_label_data(self) -> str: # compose_spec needs to be json encoded before encoding it to json # and storing it in the label return self.model_copy( - update={"compose_spec": json.dumps(self.compose_spec)}, + update={"compose_spec": json_dumps(self.compose_spec)}, deep=True, ).model_dump_json() diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py 
b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py index 7e9a9039bbb6..32643abe776d 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py @@ -9,7 +9,6 @@ """ import asyncio -import json import logging import traceback from collections.abc import Callable @@ -21,6 +20,7 @@ import dask.typing import distributed from aiohttp import ClientResponseError +from common_library.json_serialization import json_dumps from dask_task_models_library.container_tasks.docker import DockerBasicAuth from dask_task_models_library.container_tasks.errors import TaskCancelledError from dask_task_models_library.container_tasks.io import ( @@ -162,11 +162,11 @@ async def create( _logger.info( "Connection to %s succeeded [%s]", f"dask-scheduler at {endpoint}", - json.dumps(attempt.retry_state.retry_object.statistics), + json_dumps(attempt.retry_state.retry_object.statistics), ) _logger.info( "Scheduler info:\n%s", - json.dumps(backend.client.scheduler_info(), indent=2), + json_dumps(backend.client.scheduler_info(), indent=2), ) return instance # this is to satisfy pylance diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py index 09d5c7a72721..3a3cc1c3118d 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py @@ -1,6 +1,6 @@ -import json from typing import Any +from common_library.json_serialization import json_dumps from fastapi import FastAPI, status from httpx import Response, Timeout from models_library.services_creation import CreateServiceMetricsAdditionalParams @@ -123,7 +123,7 @@ async def post_containers_ports_outputs_dirs( async 
def get_containers_name( self, dynamic_sidecar_endpoint: AnyHttpUrl, *, dynamic_sidecar_network_name: str ) -> Response: - filters = json.dumps( + filters = json_dumps( { "network": dynamic_sidecar_network_name, "exclude": SUFFIX_EGRESS_PROXY_NAME, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py index 350c406c1ebe..8a7e5d152d4d 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py @@ -1,4 +1,3 @@ -import json import logging import re from collections.abc import Mapping @@ -98,7 +97,7 @@ def _to_snake_case(string: str) -> str: async def create_service_and_get_id( - create_service_data: AioDockerServiceSpec | dict[str, Any] + create_service_data: AioDockerServiceSpec | dict[str, Any], ) -> ServiceId: # NOTE: ideally the argument should always be AioDockerServiceSpec # but for that we need get_dynamic_proxy_spec to return that type @@ -108,13 +107,13 @@ async def create_service_and_get_id( ) kwargs = {_to_snake_case(k): v for k, v in kwargs.items()} - logging.debug("Creating service with\n%s", json.dumps(kwargs, indent=1)) + logging.debug("Creating service with\n%s", json_dumps(kwargs, indent=1)) service_start_result = await client.services.create(**kwargs) log.debug( "Started service %s with\n%s", service_start_result, - json.dumps(kwargs, indent=1), + json_dumps(kwargs, indent=1), ) if "ID" not in service_start_result: diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py index 6f4a413ea76e..3e77ed444e13 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py @@ -1,9 +1,8 @@ -import json import logging from collections import deque from typing import Any, cast -from common_library.json_serialization import json_loads +from common_library.json_serialization import json_dumps, json_loads from models_library.basic_types import EnvVarKey, PortInt from models_library.boot_options import BootOption from models_library.docker import ( @@ -375,7 +374,7 @@ def _patch_target_service_into_env_vars( def _format_env_var(env_var: str, destination_container: list[str]) -> str: var_name, var_payload = env_var.split("=") - json_encoded = json.dumps( + json_encoded = json_dumps( {"destination_containers": destination_container, "env_var": var_payload} ) return f"{var_name}={json_encoded}" diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index c57b8e190c48..936fbf55be96 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -1,6 +1,5 @@ import asyncio import contextlib -import json import logging import re from datetime import timedelta @@ -13,7 +12,7 @@ import arrow import httpx import tenacity -from common_library.json_serialization import json_loads +from common_library.json_serialization import json_dumps, json_loads from fastapi import FastAPI, status from packaging.version import Version from servicelib.async_utils import run_sequentially_in_context @@ -315,7 +314,7 @@ async def _create_docker_service_params( _check_setting_correctness(param) # replace %service_uuid% by the given uuid if str(param["value"]).find("%service_uuid%") != -1: - dummy_string = json.dumps(param["value"]) + dummy_string = json_dumps(param["value"]) dummy_string = 
dummy_string.replace("%service_uuid%", node_uuid) param["value"] = json_loads(dummy_string) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py index 0d201ac83ea5..b6f7d5b1c919 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py @@ -1,7 +1,5 @@ -import json - import httpx -from common_library.json_serialization import json_loads +from common_library.json_serialization import json_dumps, json_loads from fastapi import FastAPI from models_library.projects_nodes_io import NodeID from nicegui import APIRouter, app, ui @@ -120,7 +118,7 @@ def _get_clean_hashable(model: TrackedServiceModel) -> dict: def _get_hash(items: list[tuple[NodeID, TrackedServiceModel]]) -> int: return hash( - json.dumps([(f"{key}", _get_clean_hashable(model)) for key, model in items]) + json_dumps([(f"{key}", _get_clean_hashable(model)) for key, model in items]) ) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py index 3896f079cbd7..ac073072a44a 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py @@ -1,7 +1,5 @@ -import json - import httpx -from common_library.json_serialization import json_loads +from common_library.json_serialization import json_dumps, json_loads from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStop, ) @@ -94,13 +92,13 @@ async def service_details(node_id: NodeID): dynamic_service_start = 
scheduler_internals.pop("dynamic_service_start") ui.markdown("**Service Status**") - ui.code(json.dumps(service_status, indent=2), language="json") + ui.code(json_dumps(service_status, indent=2), language="json") ui.markdown("**Scheduler Internals**") - ui.code(json.dumps(scheduler_internals, indent=2), language="json") + ui.code(json_dumps(scheduler_internals, indent=2), language="json") ui.markdown("**Start Parameters**") - ui.code(json.dumps(dynamic_service_start, indent=2), language="json") + ui.code(json_dumps(dynamic_service_start, indent=2), language="json") ui.markdown("**Raw serialized data (the one used to render the above**") ui.code(service_model.model_dump_json(indent=2), language="json") diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py index e895c3db122c..4bbf9e6016ed 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py @@ -1,10 +1,10 @@ import asyncio -import json import logging from collections.abc import AsyncIterator from contextlib import asynccontextmanager import typer +from common_library.json_serialization import json_dumps from fastapi import FastAPI from servicelib.fastapi.long_running_tasks.server import TaskProgress from settings_library.utils_cli import create_settings_command @@ -31,7 +31,7 @@ def openapi(): """Prints OpenAPI specifications in json format""" app = create_base_app() - typer.secho(json.dumps(app.openapi(), indent=2)) + typer.secho(json_dumps(app.openapi(), indent=2)) @asynccontextmanager diff --git a/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py b/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py index 6386e4e8ea16..d87d528bcb90 100644 --- a/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py +++ 
b/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py @@ -3,11 +3,10 @@ from typing import TypeAlias from uuid import uuid4 -import orjson from aws_library.s3 import S3MetaData, SimcoreS3API from aws_library.s3._constants import STREAM_READER_CHUNK_SIZE from aws_library.s3._models import S3ObjectKey -from common_library.json_serialization import json_loads +from common_library.json_serialization import json_dumps, json_loads from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.projects import ProjectID, ProjectIDStr from models_library.projects_nodes_io import ( @@ -278,7 +277,7 @@ async def list_child_paths_from_s3( ] next_cursor = None if objects_next_cursor: - next_cursor = orjson.dumps( + next_cursor = json_dumps( { "file_filter": f"{file_filter}", "objects_next_cursor": objects_next_cursor, diff --git a/services/web/server/src/simcore_service_webserver/login/_preregistration_service.py b/services/web/server/src/simcore_service_webserver/login/_preregistration_service.py index 3e248fcabee9..8a5d4194330d 100644 --- a/services/web/server/src/simcore_service_webserver/login/_preregistration_service.py +++ b/services/web/server/src/simcore_service_webserver/login/_preregistration_service.py @@ -54,7 +54,7 @@ async def send_close_account_email( def _json_encoder_and_dumps(obj: Any, **kwargs): - # NOTE: equivalent json.dumps(obj, default=jsonable_encode(pydantic_encoder(.)) + # NOTE: equivalent json_dumps(obj, default=jsonable_encode(pydantic_encoder(.)) return json_dumps(jsonable_encoder(obj), **kwargs) From b4669d8d3f7a73b5406b1396fec450d836eee618 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:29:38 +0200 Subject: [PATCH 07/19] =?UTF-8?q?=E2=9C=A8=20Update:=20Add=20guidelines=20?= =?UTF-8?q?for=20JSON=20serialization=20using=20`json=5Floads`=20and=20Pyd?= =?UTF-8?q?antic=20methods?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/copilot-instructions.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index dacab64e334b..3df6854174a2 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -23,6 +23,14 @@ This document provides guidelines and best practices for using GitHub Copilot in - ensure we use `fastapi` >0.100 compatible code - use f-string formatting + +### Json serialization + +- Use `json_loads` from `common_library.json_serialization` instead of `json.dumps` / `json.loads`. +- Prefer Pydantic model methods (e.g., `model.model_dump_json()`) for serialization/deserialization. +- Avoid using the built-in `json` module for these tasks. + + ## Node.js-Specific Instructions - Use ES6+ syntax and features. From 82ebca245fe5b7e16c6adabb4739a3bbb0e24b9c Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:31:57 +0200 Subject: [PATCH 08/19] fixes mypy --- .../src/models_library/utils/labels_annotations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/utils/labels_annotations.py b/packages/models-library/src/models_library/utils/labels_annotations.py index 339d7d1b52ce..e08f66e71cb2 100644 --- a/packages/models-library/src/models_library/utils/labels_annotations.py +++ b/packages/models-library/src/models_library/utils/labels_annotations.py @@ -56,7 +56,7 @@ def from_labels( for key, label in labels.items(): if key.startswith(f"{prefix_key}."): try: - value = json_loads(label) # type: ignore + value = json_loads(label) except JSONDecodeError: value = label From 1650e57c93b917da0c1e6ef48bce84cf8bf94e95 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:40:03 +0200 Subject: [PATCH 09/19] reverts tests --- 
packages/pytest-simcore/src/pytest_simcore/helpers/s3.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py index fd16f8683cb9..61d630d994c3 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py @@ -5,8 +5,8 @@ import aiofiles import httpx +import orjson from aws_library.s3 import MultiPartUploadLinks -from common_library.json_serialization import json_loads from fastapi import status from models_library.api_schemas_storage.storage_schemas import ( ETag, @@ -71,7 +71,7 @@ async def upload_file_part( assert response.status_code == status.HTTP_200_OK assert response.headers assert "Etag" in response.headers - received_e_tag = json_loads(response.headers["Etag"]) + received_e_tag = orjson.loads(response.headers["Etag"]) print( f"--> completed upload {this_file_chunk_size=} of {file=}, [{part_index + 1}/{num_parts}], {received_e_tag=}" ) From cd30794fff6faa0a7de303d70aa688e11406518a Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:41:18 +0200 Subject: [PATCH 10/19] fixing tests --- services/dask-sidecar/tests/unit/test_tasks.py | 4 ++-- .../db_listener/_db_comp_tasks_listening_task.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/dask-sidecar/tests/unit/test_tasks.py b/services/dask-sidecar/tests/unit/test_tasks.py index 5beebe2e37fb..b63ecd505f62 100644 --- a/services/dask-sidecar/tests/unit/test_tasks.py +++ b/services/dask-sidecar/tests/unit/test_tasks.py @@ -414,7 +414,7 @@ def _creator(command: list[str] | None = None) -> ServiceExampleParam: @pytest.fixture() def failing_ubuntu_task( - sidecar_task: Callable[..., ServiceExampleParam] + sidecar_task: Callable[..., ServiceExampleParam], ) -> ServiceExampleParam: return 
sidecar_task(command=["/bin/bash", "-c", "some stupid failing command"]) @@ -444,7 +444,7 @@ def mocked_get_image_labels( ) -> mock.Mock: assert "json_schema_extra" in ServiceMetaDataPublished.model_config labels: ImageLabels = TypeAdapter(ImageLabels).validate_python( - ServiceMetaDataPublished.model_config["json_schema_extra"]["examples"][0], + ServiceMetaDataPublished.model_json_schema()["examples"][0], ) labels.integration_version = f"{integration_version}" return mocker.patch( diff --git a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py index c1bdb6790a44..61b700cf802e 100644 --- a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py +++ b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py @@ -13,7 +13,7 @@ from aiohttp import web from aiopg.sa import Engine from aiopg.sa.connection import SAConnection -from commontypes.json_serialization import json_loads +from common_types.json_serialization import json_loads from models_library.errors import ErrorDict from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID From 09ead8dc10f151f3b4c7ce9e71d41a86f3752c37 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:56:59 +0200 Subject: [PATCH 11/19] minor --- packages/simcore-sdk/tests/unit/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/simcore-sdk/tests/unit/conftest.py b/packages/simcore-sdk/tests/unit/conftest.py index 118141759fd5..34cd932081cf 100644 --- a/packages/simcore-sdk/tests/unit/conftest.py +++ b/packages/simcore-sdk/tests/unit/conftest.py @@ -27,7 +27,7 @@ def node_uuid() -> str: return str(uuid4()) -@pytest.fixture(scope="function") +@pytest.fixture async def mock_db_manager( 
monkeypatch, project_id: str, From fed76cb7bafd842390cf4874416e14cb269c8bc0 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 16:03:22 +0200 Subject: [PATCH 12/19] fix: replace custom json_dumps with standard json.dumps for hashing --- packages/models-library/src/models_library/utils/nodes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/utils/nodes.py b/packages/models-library/src/models_library/utils/nodes.py index 4c5465c96715..dd791677d198 100644 --- a/packages/models-library/src/models_library/utils/nodes.py +++ b/packages/models-library/src/models_library/utils/nodes.py @@ -1,10 +1,10 @@ import hashlib +import json import logging from collections.abc import Callable, Coroutine from copy import deepcopy from typing import Any -from common_library.json_serialization import json_dumps from pydantic import BaseModel, TypeAdapter from ..projects import Project @@ -67,6 +67,6 @@ async def compute_node_hash( # now create the hash # WARNING: Here we cannot change to json_serialization.json_dumps because if would create a different dump string and therefore a different hash # NOTE that these hashes might have been already stored elsewhere - block_string = json_dumps(resolved_payload, sort_keys=True).encode("utf-8") + block_string = json.dumps(resolved_payload, sort_keys=True).encode("utf-8") raw_hash = hashlib.sha256(block_string) return raw_hash.hexdigest() From 167b394f7bfd4f6e4015327fadb40d6a3287e1f2 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 16:04:59 +0200 Subject: [PATCH 13/19] cleanup --- packages/models-library/src/models_library/utils/nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/utils/nodes.py b/packages/models-library/src/models_library/utils/nodes.py index 
dd791677d198..fd542768f830 100644 --- a/packages/models-library/src/models_library/utils/nodes.py +++ b/packages/models-library/src/models_library/utils/nodes.py @@ -64,8 +64,8 @@ async def compute_node_hash( if payload is not None: resolved_payload[port_type][port_key] = payload - # now create the hash # WARNING: Here we cannot change to json_serialization.json_dumps because if would create a different dump string and therefore a different hash + # typically test_node_ports_v2_serialization_v2.py::test_dump will fail if you do this change. # NOTE that these hashes might have been already stored elsewhere block_string = json.dumps(resolved_payload, sort_keys=True).encode("utf-8") raw_hash = hashlib.sha256(block_string) From c11b04a0c84adc3bd66ab2c746401360563e4130 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 16:08:55 +0200 Subject: [PATCH 14/19] cleanup prompt --- .github/copilot-instructions.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 3df6854174a2..18abee576c12 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -26,9 +26,8 @@ This document provides guidelines and best practices for using GitHub Copilot in ### Json serialization -- Use `json_loads` from `common_library.json_serialization` instead of `json.dumps` / `json.loads`. -- Prefer Pydantic model methods (e.g., `model.model_dump_json()`) for serialization/deserialization. -- Avoid using the built-in `json` module for these tasks. +- Generally use `json_dumps`/`json_loads` from `common_library.json_serialization` instead of the built-in `json.dumps` / `json.loads`. +- Prefer Pydantic model methods (e.g., `model.model_dump_json()`) for serialization.
## Node.js-Specific Instructions From b6bf3ad5f35be5a293be4077bb3189e6a90a5d95 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 16:40:42 +0200 Subject: [PATCH 15/19] @GitHK review: updated the example --- .../src/models_library/utils/change_case.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/packages/models-library/src/models_library/utils/change_case.py b/packages/models-library/src/models_library/utils/change_case.py index 89495a12e8b1..9cba0145d449 100644 --- a/packages/models-library/src/models_library/utils/change_case.py +++ b/packages/models-library/src/models_library/utils/change_case.py @@ -4,11 +4,9 @@ Example of usage in pydantic: [...] - class Config: - extra = Extra.forbid - alias_generator = snake_to_camel # <-------- - json_loads = json_loads - json_dumps = json_dumps + model_config = ConfigDict( + alias_generator=snake_to_camel, # <-- note + ) """ From 55b59a30393e3a75ab7af0be8dffa1fc288c3adf Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 16:41:03 +0200 Subject: [PATCH 16/19] unused --- .../src/models_library/service_settings_labels.py | 4 +++- .../simcore_service_api_server/models/_utils_pydantic.py | 6 ------ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index 0884bda1ba23..2dbeaa2fba94 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -27,7 +27,9 @@ from .services_resources import DEFAULT_SINGLE_SERVICE_NAME _BaseConfig = ConfigDict( - extra="forbid", arbitrary_types_allowed=True, ignored_types=(cached_property,) + extra="forbid", + arbitrary_types_allowed=True, + ignored_types=(cached_property,), ) diff --git 
a/services/api-server/src/simcore_service_api_server/models/_utils_pydantic.py b/services/api-server/src/simcore_service_api_server/models/_utils_pydantic.py index cb0d3352d941..5e0e3c57120d 100644 --- a/services/api-server/src/simcore_service_api_server/models/_utils_pydantic.py +++ b/services/api-server/src/simcore_service_api_server/models/_utils_pydantic.py @@ -1,16 +1,10 @@ from copy import deepcopy -from common_library.json_serialization import json_dumps, json_loads from pydantic import GetJsonSchemaHandler from pydantic.json_schema import JsonSchemaValue from pydantic_core.core_schema import CoreSchema -class BaseConfig: - json_loads = json_loads - json_dumps = json_dumps - - class UriSchema: """Metadata class to modify openapi schemas of Url fields From 19e37655195ef9a14db070e2d475d36b336c5914 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 23 Apr 2025 23:17:09 +0200 Subject: [PATCH 17/19] bad import --- .../db_listener/_db_comp_tasks_listening_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py index 61b700cf802e..ea6ee0c2b62b 100644 --- a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py +++ b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py @@ -13,7 +13,7 @@ from aiohttp import web from aiopg.sa import Engine from aiopg.sa.connection import SAConnection -from common_types.json_serialization import json_loads +from common_library.json_serialization import json_loads from models_library.errors import ErrorDict from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID From 7f0374a306f7f40a01bdbb3b4946febcc7cdfb29 Mon Sep 17 00:00:00 2001 From: Pedro 
Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Thu, 24 Apr 2025 08:31:12 +0200 Subject: [PATCH 18/19] adapts tests --- .../clusters-keeper/tests/unit/test_utils_clusters.py | 2 +- services/dask-sidecar/tests/unit/test_tasks.py | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/services/clusters-keeper/tests/unit/test_utils_clusters.py b/services/clusters-keeper/tests/unit/test_utils_clusters.py index 5a96d17cde0c..dbff4e0ecc8f 100644 --- a/services/clusters-keeper/tests/unit/test_utils_clusters.py +++ b/services/clusters-keeper/tests/unit/test_utils_clusters.py @@ -181,7 +181,7 @@ def test_create_deploy_cluster_stack_script( "WORKERS_EC2_INSTANCES_CUSTOM_TAGS", ] assert all( - re.search(rf"{i}=\'{{(\".+\":\s\".*\")+}}\'", deploy_script) + re.search(rf"{i}=\'{{(\".+\":\s*\".*\")+}}\'", deploy_script) for i in dict_settings ) diff --git a/services/dask-sidecar/tests/unit/test_tasks.py b/services/dask-sidecar/tests/unit/test_tasks.py index b63ecd505f62..0a91c30fe992 100644 --- a/services/dask-sidecar/tests/unit/test_tasks.py +++ b/services/dask-sidecar/tests/unit/test_tasks.py @@ -20,6 +20,7 @@ import distributed import fsspec import pytest +from common_library.json_serialization import json_dumps from dask_task_models_library.container_tasks.docker import DockerBasicAuth from dask_task_models_library.container_tasks.errors import ServiceRuntimeError from dask_task_models_library.container_tasks.events import ( @@ -363,7 +364,14 @@ def sleeper_task( log_file_url=log_file_url, expected_output_data=expected_output_data, expected_logs=[ - '{"input_1": 23, "input_23": "a string input", "the_input_43": 15.0, "the_bool_input_54": false}', + json_dumps( + { + "input_1": 23, + "input_23": "a string input", + "the_input_43": 15.0, + "the_bool_input_54": False, + } + ), "This is the file contents of file #'001'", "This is the file contents of file #'002'", "This is the file contents of file #'003'", From 
c9fc66a75240aef457e52607d105edfc0a728472 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Thu, 24 Apr 2025 13:12:46 +0200 Subject: [PATCH 19/19] undo cluster and fix dv2 --- .../src/simcore_service_clusters_keeper/utils/clusters.py | 4 ++-- services/clusters-keeper/tests/unit/test_utils_clusters.py | 2 +- .../unit/test_modules_dynamic_sidecar_client_api_thin.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py index cf1543e63925..d2820ef2b882 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py @@ -1,6 +1,7 @@ import base64 import datetime import functools +import json from pathlib import Path from typing import Any, Final @@ -8,7 +9,6 @@ import yaml from aws_library.ec2 import EC2InstanceBootSpecific, EC2InstanceData, EC2Tags from aws_library.ec2._models import CommandStr -from common_library.json_serialization import json_dumps from common_library.serialization import model_dump_with_secrets from fastapi.encoders import jsonable_encoder from models_library.api_schemas_clusters_keeper.clusters import ( @@ -80,7 +80,7 @@ def _convert_to_env_list(entries: list[Any]) -> str: return f"[{entries_as_str}]" def _convert_to_env_dict(entries: dict[str, Any]) -> str: - return f"'{json_dumps(jsonable_encoder(entries))}'" + return f"'{json.dumps(jsonable_encoder(entries))}'" assert app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES # nosec diff --git a/services/clusters-keeper/tests/unit/test_utils_clusters.py b/services/clusters-keeper/tests/unit/test_utils_clusters.py index dbff4e0ecc8f..5a96d17cde0c 100644 --- a/services/clusters-keeper/tests/unit/test_utils_clusters.py +++ 
b/services/clusters-keeper/tests/unit/test_utils_clusters.py @@ -181,7 +181,7 @@ def test_create_deploy_cluster_stack_script( "WORKERS_EC2_INSTANCES_CUSTOM_TAGS", ] assert all( - re.search(rf"{i}=\'{{(\".+\":\s*\".*\")+}}\'", deploy_script) + re.search(rf"{i}=\'{{(\".+\":\s\".*\")+}}\'", deploy_script) for i in dict_settings ) diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py index 6584020dcb60..7e9d4f429a49 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py @@ -1,11 +1,11 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -import json from collections.abc import AsyncIterable, Callable from typing import Any import pytest +from common_library.json_serialization import json_dumps from faker import Faker from fastapi import FastAPI, status from httpx import Response @@ -182,7 +182,7 @@ async def test_get_containers_name( ) -> None: mock_response = Response(status.HTTP_200_OK) - encoded_filters = json.dumps( + encoded_filters = json_dumps( { "network": dynamic_sidecar_network_name, "exclude": SUFFIX_EGRESS_PROXY_NAME,