Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .github/copilot-instructions.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,13 @@ This document provides guidelines and best practices for using GitHub Copilot in
- ensure we use `fastapi` >0.100 compatible code
- use f-string formatting


### Json serialization

- Generally use `json_dumps`/`json_loads` from `common_library.json_serialization` instead of the built-in `json.dumps` / `json.loads`.
- Prefer Pydantic model methods (e.g., `model.model_dump_json()`) for serialization.


## Node.js-Specific Instructions

- Use ES6+ syntax and features.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from pathlib import Path
from typing import Annotated, Any, TypeAlias

from common_library.json_serialization import json_loads
from models_library.basic_regex import MIME_TYPE_RE
from models_library.generics import DictModel
from models_library.services_types import ServicePortKey
Expand Down Expand Up @@ -160,7 +161,7 @@ def from_task_output(
with suppress(json.JSONDecodeError):
# NOTE: The suppression here is ok, since if the data is empty,
# there will be a validation error anyway
data = json.loads(output_data_file.read_text())
data = json_loads(output_data_file.read_text())

for output_key, output_params in schema.items():
if isinstance(output_params, FilePortSchema):
Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
import json
import os

from common_library.json_serialization import json_loads
from pydantic_settings import BaseSettings

# Expects env var: FUNCTION_SERVICES_AUTHORS='{"OM":{"name": ...}, "EN":{...} }'
try:
AUTHORS = json.loads(os.environ.get("FUNCTION_SERVICES_AUTHORS", "{}"))
AUTHORS = json_loads(os.environ.get("FUNCTION_SERVICES_AUTHORS", "{}"))
except json.decoder.JSONDecodeError:
AUTHORS = {}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,9 @@
from .services_resources import DEFAULT_SINGLE_SERVICE_NAME

_BaseConfig = ConfigDict(
extra="forbid", arbitrary_types_allowed=True, ignored_types=(cached_property,)
extra="forbid",
arbitrary_types_allowed=True,
ignored_types=(cached_property,),
)


Expand Down
Original file line number Diff line number Diff line change
@@ -1,16 +1,15 @@
""" String convesion
"""String convesion


Example of usage in pydantic:

[...]
class Config:
extra = Extra.forbid
alias_generator = snake_to_camel # <--------
json_loads = orjson.loads
json_dumps = json_dumps
model_config = ConfigDict(
alias_generator=snake_to_camel, # <-- note
)

"""

# Partially taken from https://github.com/autoferrit/python-change-case/blob/master/change_case/change_case.py#L131
import re
from typing import Final
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
""" Image labels annotations
"""Image labels annotations

osparc expects the service configuration (in short: config) attached to the service's image as label annotations.
This module defines how this config is serialized/deserialized to/from docker labels annotations
"""

import json
from json.decoder import JSONDecodeError
from typing import Any, TypeAlias

from common_library.json_serialization import json_dumps
from common_library.json_serialization import json_dumps, json_loads

LabelsAnnotationsDict: TypeAlias = dict[str, str | float | bool | None]

Expand Down Expand Up @@ -57,7 +56,7 @@ def from_labels(
for key, label in labels.items():
if key.startswith(f"{prefix_key}."):
try:
value = json.loads(label) # type: ignore
value = json_loads(label)
except JSONDecodeError:
value = label

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,8 @@ async def compute_node_hash(
if payload is not None:
resolved_payload[port_type][port_key] = payload

# now create the hash
# WARNING: Here we cannot change to json_serialization.json_dumps because if would create a different dump string and therefore a different hash
# typically test_node_ports_v2_serialization_v2.py::test_dump will fail if you do this change.
# NOTE that these hashes might have been already stored elsewhere
block_string = json.dumps(resolved_payload, sort_keys=True).encode("utf-8")
raw_hash = hashlib.sha256(block_string)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,10 @@
- Every product has a front-end with exactly the same name
"""

import json
from typing import Literal

import sqlalchemy as sa
from common_library.json_serialization import json_dumps
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.sql import func
from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict
Expand Down Expand Up @@ -114,7 +114,7 @@ class ProductLoginSettingsDict(TypedDict, total=False):

# NOTE: defaults affects migration!!
LOGIN_SETTINGS_DEFAULT = ProductLoginSettingsDict() # = {}
_LOGIN_SETTINGS_SERVER_DEFAULT = json.dumps(LOGIN_SETTINGS_DEFAULT)
_LOGIN_SETTINGS_SERVER_DEFAULT = json_dumps(LOGIN_SETTINGS_DEFAULT)


#
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import json
from pathlib import Path
from typing import Annotated, Final

import rich
import typer
import yaml
from common_library.json_serialization import json_loads
from models_library.utils.labels_annotations import LabelsAnnotationsDict
from pydantic import BaseModel

Expand Down Expand Up @@ -57,7 +57,7 @@ def _save(service_name: str, filename: Path, model: BaseModel):
rich.print(f"Creating {output_path} ...", end="")

with output_path.open("wt") as fh:
data = json.loads(
data = json_loads(
model.model_dump_json(by_alias=True, exclude_none=True)
)
yaml.safe_dump(data, fh, sort_keys=False)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import jsonschema
import pytest
import yaml
from common_library.json_serialization import json_loads
from docker.errors import APIError
from docker.models.containers import Container

Expand Down Expand Up @@ -206,7 +207,7 @@ def convert_to_simcore_labels(image_labels: dict) -> dict:
io_simcore_labels = {}
for key, value in image_labels.items():
if str(key).startswith("io.simcore."):
simcore_label = json.loads(value)
simcore_label = json_loads(value)
simcore_keys = list(simcore_label.keys())
assert len(simcore_keys) == 1
simcore_key = simcore_keys[0]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
SEE https://gist.github.com/amitripshtos/854da3f4217e3441e8fceea85b0cbd91
"""

import json
import logging
from collections.abc import Awaitable, Callable
from typing import Any, Union
Expand All @@ -12,7 +11,7 @@
from aiohttp.web_request import Request
from aiohttp.web_response import StreamResponse
from common_library.error_codes import create_error_code
from common_library.json_serialization import json_dumps
from common_library.json_serialization import json_dumps, json_loads
from models_library.rest_error import ErrorGet, ErrorItemType, LogMessageType

from ..logging_errors import create_troubleshotting_log_kwargs
Expand Down Expand Up @@ -107,7 +106,7 @@ async def _middleware_handler(request: web.Request, handler: Handler):
err.content_type = MIMETYPE_APPLICATION_JSON
if err.text:
try:
payload = json.loads(err.text)
payload = json_loads(err.text)
if not is_enveloped_from_map(payload):
payload = wrap_as_envelope(data=payload)
err.text = json_dumps(payload)
Expand Down
4 changes: 3 additions & 1 deletion packages/service-library/src/servicelib/rest_responses.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
from collections.abc import Mapping
from typing import Any

from common_library.json_serialization import json_loads

_ENVELOPE_KEYS = ("data", "error")


Expand All @@ -11,7 +13,7 @@ def is_enveloped_from_map(payload: Mapping) -> bool:

def is_enveloped_from_text(text: str) -> bool:
try:
payload = json.loads(text)
payload = json_loads(text)
except json.decoder.JSONDecodeError:
return False
return is_enveloped_from_map(payload)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import json
import logging

import sqlalchemy as sa
from common_library.json_serialization import json_dumps, json_loads

from models_library.projects import ProjectID
from models_library.users import UserID
from pydantic import TypeAdapter
Expand Down Expand Up @@ -85,7 +86,7 @@ async def write_ports_configuration(
)
_logger.debug(message)

node_configuration = json.loads(json_configuration)
node_configuration = json_loads(json_configuration)
async with (
DBContextManager(self._db_engine) as engine,
engine.begin() as connection,
Expand Down Expand Up @@ -116,7 +117,7 @@ async def get_ports_configuration_from_node_uuid(
engine.connect() as connection,
):
node = await _get_node_from_db(project_id, node_uuid, connection)
node_json_config = json.dumps(
node_json_config = json_dumps(
{
"schema": node.schema,
"inputs": node.inputs,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import asyncio
import json
import logging
from collections.abc import AsyncGenerator, Coroutine
from contextlib import AsyncExitStack
Expand All @@ -17,6 +16,7 @@
ClientSession,
RequestInfo,
)
from common_library.json_serialization import json_loads
from models_library.api_schemas_storage.storage_schemas import (
ETag,
FileUploadSchema,
Expand Down Expand Up @@ -143,8 +143,7 @@ class ProgressData:

@runtime_checkable
class LogRedirectCB(Protocol):
async def __call__(self, log: str) -> None:
...
async def __call__(self, log: str) -> None: ...


async def _file_chunk_writer(
Expand Down Expand Up @@ -276,7 +275,7 @@ async def _session_put(
assert response.status == status.HTTP_200_OK # nosec
assert response.headers # nosec
assert "Etag" in response.headers # nosec
etag: str = json.loads(response.headers["Etag"])
etag: str = json_loads(response.headers["Etag"])
return etag


Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
import functools
import json
import logging
from pprint import pformat
from typing import Any

import pydantic
from common_library.json_serialization import json_dumps
from common_library.json_serialization import json_dumps, json_loads
from models_library.projects_nodes_io import NodeID
from models_library.utils.nodes import compute_node_hash
from packaging import version
Expand Down Expand Up @@ -50,7 +49,7 @@ async def load(
port_config_str: str = await db_manager.get_ports_configuration_from_node_uuid(
project_id, node_uuid
)
port_cfg = json.loads(port_config_str)
port_cfg = json_loads(port_config_str)

log.debug(f"{port_cfg=}") # pylint: disable=logging-fstring-interpolation
if any(k not in port_cfg for k in NODE_REQUIRED_KEYS):
Expand Down
2 changes: 1 addition & 1 deletion packages/simcore-sdk/tests/unit/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def node_uuid() -> str:
return str(uuid4())


@pytest.fixture(scope="function")
@pytest.fixture
async def mock_db_manager(
monkeypatch,
project_id: str,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,16 +1,10 @@
from copy import deepcopy

from common_library.json_serialization import json_dumps, json_loads
from pydantic import GetJsonSchemaHandler
from pydantic.json_schema import JsonSchemaValue
from pydantic_core.core_schema import CoreSchema


class BaseConfig:
json_loads = json_loads
json_dumps = json_dumps


class UriSchema:
"""Metadata class to modify openapi schemas of Url fields

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import json

from common_library.json_serialization import json_dumps
from fastapi import FastAPI

from ..core.settings import ApplicationSettings
Expand All @@ -14,7 +13,7 @@ def create_lock_key_and_value(app: FastAPI) -> tuple[str, str]:
"dynamic",
*app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS,
]
lock_value = json.dumps(
lock_value = json_dumps(
{
"node_labels": app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS
}
Expand All @@ -24,7 +23,7 @@ def create_lock_key_and_value(app: FastAPI) -> tuple[str, str]:
"computational",
f"{app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL}",
]
lock_value = json.dumps(
lock_value = json_dumps(
{"scheduler_url": f"{app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL}"}
)
lock_key = ":".join(f"{k}" for k in lock_key_parts)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,12 @@
""" Free helper functions for AWS API
"""Free helper functions for AWS API"""

"""

import json
import logging
from collections import OrderedDict
from collections.abc import Callable
from textwrap import dedent

from aws_library.ec2 import AWSTagKey, AWSTagValue, EC2InstanceType, EC2Tags, Resources
from common_library.json_serialization import json_dumps

from .._meta import VERSION
from ..core.errors import ConfigurationError, TaskBestFittingInstanceNotFoundError
Expand All @@ -23,12 +21,12 @@ def get_ec2_tags_dynamic(app_settings: ApplicationSettings) -> EC2Tags:
return {
AWSTagKey("io.simcore.autoscaling.version"): AWSTagValue(f"{VERSION}"),
AWSTagKey("io.simcore.autoscaling.monitored_nodes_labels"): AWSTagValue(
json.dumps(
json_dumps(
app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS
)
),
AWSTagKey("io.simcore.autoscaling.monitored_services_labels"): AWSTagValue(
json.dumps(
json_dumps(
app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_SERVICE_LABELS
)
),
Expand Down
Loading
Loading