Skip to content

Commit bdf71b0

Browse files
authored
Merge branch 'master' into enh/search-by-tags
2 parents 541c6e8 + 118b363 commit bdf71b0

File tree

69 files changed

+1024
-253
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

69 files changed

+1024
-253
lines changed
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
from typing import Final
2+
3+
from pydantic import parse_obj_as
4+
5+
from ..rabbitmq_basic_types import RPCNamespace
6+
7+
DYNAMIC_SIDECAR_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as(
8+
RPCNamespace, "dynamic-sidecar"
9+
)

packages/models-library/src/models_library/api_schemas_dynamic_sidecar/telemetry.py

Lines changed: 72 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,28 @@
11
from abc import abstractmethod
2-
from pathlib import Path
3-
from typing import Protocol
2+
from enum import auto
3+
from typing import Any, Final, Protocol
44

5-
from models_library.projects_nodes_io import NodeID
6-
from pydantic import BaseModel, ByteSize, Field
5+
from pydantic import (
6+
BaseModel,
7+
ByteSize,
8+
Field,
9+
NonNegativeFloat,
10+
NonNegativeInt,
11+
root_validator,
12+
validator,
13+
)
14+
15+
from ..projects_nodes_io import NodeID
16+
from ..utils.enums import StrAutoEnum
17+
18+
_EPSILON: Final[NonNegativeFloat] = 1e-16
19+
20+
21+
class MountPathCategory(StrAutoEnum):
22+
HOST = auto()
23+
STATES_VOLUMES = auto()
24+
INPUTS_VOLUMES = auto()
25+
OUTPUTS_VOLUMES = auto()
726

827

928
class SDiskUsageProtocol(Protocol):
@@ -28,31 +47,71 @@ def percent(self) -> float:
2847
...
2948

3049

50+
def _get_percent(used: float, total: float) -> float:
51+
return round(used * 100 / (total + _EPSILON), 2)
52+
53+
3154
class DiskUsage(BaseModel):
3255
used: ByteSize = Field(description="used space")
3356
free: ByteSize = Field(description="remaining space")
3457

3558
total: ByteSize = Field(description="total space = free + used")
36-
used_percent: float = Field(
59+
used_percent: NonNegativeFloat = Field(
3760
gte=0.00,
3861
lte=100.00,
3962
description="Percent of used space relative to the total space",
4063
)
4164

65+
@validator("free")
4266
@classmethod
43-
def from_ps_util_disk_usage(
44-
cls, ps_util_disk_usage: SDiskUsageProtocol
67+
def _free_positive(cls, v: float) -> float:
68+
if v < 0:
69+
msg = f"free={v} cannot be a negative value"
70+
raise ValueError(msg)
71+
return v
72+
73+
@validator("used")
74+
@classmethod
75+
def _used_positive(cls, v: float) -> float:
76+
if v < 0:
77+
msg = f"used={v} cannot be a negative value"
78+
raise ValueError(msg)
79+
return v
80+
81+
@root_validator(pre=True)
82+
@classmethod
83+
def _check_total(cls, values: dict[str, Any]) -> dict[str, Any]:
84+
total = values["total"]
85+
free = values["free"]
86+
used = values["used"]
87+
if total != free + used:
88+
msg = f"{total=} is different than the sum of {free=}+{used=} => sum={free+used}"
89+
raise ValueError(msg)
90+
return values
91+
92+
@classmethod
93+
def from_efs_guardian(
94+
cls, used: NonNegativeInt, total: NonNegativeInt
4595
) -> "DiskUsage":
46-
total = ps_util_disk_usage.free + ps_util_disk_usage.used
47-
used_percent = round(ps_util_disk_usage.used * 100 / total, 2)
96+
free = total - used
4897
return cls(
49-
used=ByteSize(ps_util_disk_usage.used),
50-
free=ByteSize(ps_util_disk_usage.free),
98+
used=ByteSize(used),
99+
free=ByteSize(free),
51100
total=ByteSize(total),
52-
used_percent=used_percent,
101+
used_percent=_get_percent(used, total),
53102
)
54103

104+
@classmethod
105+
def from_ps_util_disk_usage(
106+
cls, ps_util_disk_usage: SDiskUsageProtocol
107+
) -> "DiskUsage":
108+
total = ps_util_disk_usage.free + ps_util_disk_usage.used
109+
return cls.from_efs_guardian(ps_util_disk_usage.used, total)
110+
111+
def __hash__(self):
112+
return hash((self.used, self.free, self.total, self.used_percent))
113+
55114

56115
class ServiceDiskUsage(BaseModel):
57116
node_id: NodeID
58-
usage: dict[Path, DiskUsage]
117+
usage: dict[MountPathCategory, DiskUsage]

packages/models-library/tests/test_api_schemas_dynamic_sidecar_telemetry.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
import pytest
33
from models_library.api_schemas_dynamic_sidecar.telemetry import DiskUsage
44
from psutil._common import sdiskusage
5+
from pydantic import ByteSize, ValidationError
56

67

78
def _assert_same_value(ps_util_disk_usage: sdiskusage) -> None:
@@ -27,3 +28,35 @@ def test_disk_usage_regression_cases(ps_util_disk_usage: sdiskusage):
2728
def test_disk_usage():
2829
ps_util_disk_usage = psutil.disk_usage("/")
2930
_assert_same_value(ps_util_disk_usage)
31+
32+
33+
def test_from_efs_guardian_constructor():
34+
result = DiskUsage.from_efs_guardian(10, 100)
35+
assert result.used == ByteSize(10)
36+
assert result.free == ByteSize(90)
37+
assert result.total == ByteSize(100)
38+
assert result.used_percent == 10
39+
40+
41+
def test_failing_validation():
42+
with pytest.raises(ValidationError) as exc:
43+
assert DiskUsage.from_efs_guardian(100, 10)
44+
45+
assert "free=" in f"{exc.value}"
46+
assert "negative value" in f"{exc.value}"
47+
48+
with pytest.raises(ValidationError) as exc:
49+
assert DiskUsage(
50+
used=-10, # type: ignore
51+
free=ByteSize(10),
52+
total=ByteSize(0),
53+
used_percent=-10,
54+
)
55+
assert "used=" in f"{exc.value}"
56+
assert "negative value" in f"{exc.value}"
57+
58+
with pytest.raises(ValidationError) as exc:
59+
DiskUsage(
60+
used=ByteSize(10), free=ByteSize(10), total=ByteSize(21), used_percent=0
61+
)
62+
assert "is different than the sum of" in f"{exc.value}"

packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/__init__.py

Whitespace-only changes.
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
import logging
2+
3+
from models_library.api_schemas_dynamic_sidecar import DYNAMIC_SIDECAR_RPC_NAMESPACE
4+
from models_library.api_schemas_dynamic_sidecar.telemetry import DiskUsage
5+
from models_library.rabbitmq_basic_types import RPCMethodName
6+
from pydantic import parse_obj_as
7+
from servicelib.logging_utils import log_decorator
8+
from servicelib.rabbitmq import RabbitMQRPCClient
9+
10+
_logger = logging.getLogger(__name__)
11+
12+
13+
@log_decorator(_logger, level=logging.DEBUG)
14+
async def update_disk_usage(
15+
rabbitmq_rpc_client: RabbitMQRPCClient, *, usage: dict[str, DiskUsage]
16+
) -> None:
17+
result = await rabbitmq_rpc_client.request(
18+
DYNAMIC_SIDECAR_RPC_NAMESPACE,
19+
parse_obj_as(RPCMethodName, "update_disk_usage"),
20+
usage=usage,
21+
)
22+
assert result is None # nosec

services/api-server/src/simcore_service_api_server/models/schemas/jobs.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@
3434
)
3535

3636
JobID: TypeAlias = UUID
37+
assert JobID == ProjectID
3738

3839
# ArgumentTypes are types used in the job inputs (see ResultsTypes)
3940
ArgumentTypes: TypeAlias = (

services/api-server/tests/conftest.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,10 @@
88
import pytest
99
import simcore_service_api_server
1010
from dotenv import dotenv_values
11+
from models_library.projects import ProjectID
12+
from pydantic import parse_obj_as
1113
from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict
14+
from simcore_service_api_server.models.schemas.jobs import JobID
1215

1316
CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
1417

@@ -96,3 +99,11 @@ def tests_utils_dir(project_tests_dir: Path) -> Path:
9699
utils_dir = (project_tests_dir / "utils").resolve()
97100
assert utils_dir.exists()
98101
return utils_dir
102+
103+
104+
## BASIC IDENTIFIERS ---
105+
106+
107+
@pytest.fixture
108+
def job_id(project_id: ProjectID) -> JobID:
109+
return parse_obj_as(JobID, project_id)

services/api-server/tests/unit/test_models_schemas_jobs.py

Lines changed: 55 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,23 @@
33
# pylint: disable=unused-variable
44

55
import random
6+
import textwrap
67
import urllib.parse
78
from copy import deepcopy
89
from uuid import uuid4
910

1011
import pytest
12+
from faker import Faker
1113
from fastapi import FastAPI
14+
from models_library.api_schemas_webserver.projects_metadata import ProjectMetadataGet
15+
from models_library.generics import Envelope
1216
from simcore_service_api_server._meta import API_VTAG
13-
from simcore_service_api_server.models.schemas.jobs import Job, JobInputs
17+
from simcore_service_api_server.models.schemas.jobs import (
18+
Job,
19+
JobID,
20+
JobInputs,
21+
JobMetadata,
22+
)
1423
from simcore_service_api_server.models.schemas.solvers import Solver
1524

1625

@@ -70,3 +79,48 @@ def test_job_resouce_names_has_associated_url(app: FastAPI):
7079
)
7180

7281
assert url_path == f"/{API_VTAG}/{urllib.parse.unquote_plus(job_name)}"
82+
83+
84+
@pytest.mark.acceptance_test(
85+
"Fixing https://github.com/ITISFoundation/osparc-simcore/issues/6556"
86+
)
87+
def test_parsing_job_custom_metadata(job_id: JobID, faker: Faker):
88+
job_name = faker.name()
89+
90+
got = Envelope[ProjectMetadataGet].parse_raw(
91+
textwrap.dedent(
92+
f"""
93+
{{
94+
"data": {{
95+
"projectUuid": "{job_id}",
96+
"custom": {{
97+
"number": 3.14,
98+
"string": "foo",
99+
"boolean": true,
100+
"integer": 42,
101+
"job_id": "{job_id}",
102+
"job_name": "{job_name}"
103+
}}
104+
}}
105+
}}
106+
"""
107+
)
108+
)
109+
110+
assert got.data
111+
assert got.data.custom == {
112+
"number": 3.14,
113+
"string": "foo",
114+
"boolean": True,
115+
"integer": 42,
116+
"job_id": f"{job_id}",
117+
"job_name": job_name,
118+
}
119+
120+
j = JobMetadata(
121+
job_id=job_id,
122+
metadata=got.data.custom or {},
123+
url=faker.url(),
124+
)
125+
126+
assert j.metadata == got.data.custom

services/director-v2/src/simcore_service_director_v2/api/routes/computations.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@
3737
from models_library.utils.fastapi_encoders import jsonable_encoder
3838
from pydantic import AnyHttpUrl, parse_obj_as
3939
from servicelib.async_utils import run_sequentially_in_context
40+
from servicelib.logging_utils import log_decorator
4041
from servicelib.rabbitmq import RabbitMQRPCClient
4142
from simcore_postgres_database.utils_projects_metadata import DBProjectNotFoundError
4243
from starlette import status
@@ -150,6 +151,7 @@ async def _check_pipeline_startable(
150151
_UNKNOWN_NODE: Final[str] = "unknown node"
151152

152153

154+
@log_decorator(_logger)
153155
async def _get_project_metadata(
154156
project_id: ProjectID,
155157
project_repo: ProjectsRepository,
@@ -160,7 +162,7 @@ async def _get_project_metadata(
160162
project_id
161163
)
162164
if project_ancestors.parent_project_uuid is None:
163-
# no parents here
165+
_logger.debug("no parent found for project %s", project_id)
164166
return {}
165167

166168
assert project_ancestors.parent_node_id is not None # nosec

services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ async def get(
5050
)
5151
row: RowProxy | None = await result.first()
5252
if not row:
53-
raise ComputationalRunNotFoundError()
53+
raise ComputationalRunNotFoundError
5454
return CompRunsAtDB.from_orm(row)
5555

5656
async def list(
@@ -80,7 +80,7 @@ async def create(
8080
project_id: ProjectID,
8181
cluster_id: ClusterID,
8282
iteration: PositiveInt | None = None,
83-
metadata: RunMetadataDict | None,
83+
metadata: RunMetadataDict,
8484
use_on_demand_clusters: bool,
8585
) -> CompRunsAtDB:
8686
try:
@@ -102,13 +102,13 @@ async def create(
102102
.values(
103103
user_id=user_id,
104104
project_uuid=f"{project_id}",
105-
cluster_id=cluster_id
106-
if cluster_id != DEFAULT_CLUSTER_ID
107-
else None,
105+
cluster_id=(
106+
cluster_id if cluster_id != DEFAULT_CLUSTER_ID else None
107+
),
108108
iteration=iteration,
109109
result=RUNNING_STATE_TO_DB[RunningState.PUBLISHED],
110-
started=datetime.datetime.now(tz=datetime.timezone.utc),
111-
metadata=jsonable_encoder(metadata) if metadata else None,
110+
started=datetime.datetime.now(tz=datetime.UTC),
111+
metadata=jsonable_encoder(metadata),
112112
use_on_demand_clusters=use_on_demand_clusters,
113113
)
114114
.returning(literal_column("*"))
@@ -146,7 +146,7 @@ async def set_run_result(
146146
) -> CompRunsAtDB | None:
147147
values: dict[str, Any] = {"result": RUNNING_STATE_TO_DB[result_state]}
148148
if final_state:
149-
values.update({"ended": datetime.datetime.now(tz=datetime.timezone.utc)})
149+
values.update({"ended": datetime.datetime.now(tz=datetime.UTC)})
150150
return await self.update(
151151
user_id,
152152
project_id,

0 commit comments

Comments
 (0)