
Commit cc605bc

Merge branch 'master' into 7548-monitor-api-servers-log-streaming-queues

2 parents aea1884 + d6deede

69 files changed: +1803 −960 lines


.env-devel

Lines changed: 2 additions & 2 deletions

@@ -86,10 +86,10 @@ DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=null
 DIRECTOR_TRACING=null
 
 DOCKER_API_PROXY_HOST=docker-api-proxy
-DOCKER_API_PROXY_PASSWORD=null
+DOCKER_API_PROXY_PASSWORD=admin
 DOCKER_API_PROXY_PORT=8888
 DOCKER_API_PROXY_SECURE=False
-DOCKER_API_PROXY_USER=null
+DOCKER_API_PROXY_USER=admin
 
 EFS_USER_ID=8006
 EFS_USER_NAME=efs
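
Note: these are development-only defaults. A minimal sketch of how such env vars are typically loaded — the class below is a hypothetical stand-in for settings_library.docker_api_proxy.DockerApiProxysettings (used further down in this commit), not the repo's actual code:

from pydantic import SecretStr
from pydantic_settings import BaseSettings


class DockerApiProxySettingsSketch(BaseSettings):  # hypothetical stand-in
    DOCKER_API_PROXY_HOST: str = "docker-api-proxy"
    DOCKER_API_PROXY_PORT: int = 8888
    DOCKER_API_PROXY_SECURE: bool = False
    DOCKER_API_PROXY_USER: str = "admin"
    DOCKER_API_PROXY_PASSWORD: SecretStr = SecretStr("admin")

    @property
    def base_url(self) -> str:
        scheme = "https" if self.DOCKER_API_PROXY_SECURE else "http"
        return f"{scheme}://{self.DOCKER_API_PROXY_HOST}:{self.DOCKER_API_PROXY_PORT}"


settings = DockerApiProxySettingsSketch()
assert settings.DOCKER_API_PROXY_PASSWORD.get_secret_value() == "admin"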

.github/workflows/ci-testing-deploy.yml

Lines changed: 1 addition & 1 deletion

@@ -573,7 +573,7 @@ jobs:
       - name: install uv
         uses: astral-sh/setup-uv@v6
         with:
-          version: "0.5.x"
+          version: "0.6.x"
           enable-cache: false
           cache-dependency-glob: "**/notifications/requirements/ci.txt"
       - name: show system version

packages/models-library/src/models_library/rpc/webserver/projects.py

Lines changed: 23 additions & 5 deletions

@@ -29,7 +29,7 @@ class ProjectJobRpcGet(BaseModel):
     workbench: NodesDict
 
     # timestamps
-    creation_at: datetime
+    created_at: datetime
     modified_at: datetime
 
     # Specific to jobs
@@ -43,12 +43,30 @@ def _update_json_schema_extra(schema: JsonDict) -> None:
         "examples": [
             {
                 "uuid": "12345678-1234-5678-1234-123456789012",
-                "name": "My project",
-                "description": "My project description",
+                "name": "A solver job",
+                "description": "A description of a solver job with a single node",
                 "workbench": {f"{uuid4()}": n for n in nodes_examples[2:3]},
-                "creation_at": "2023-01-01T00:00:00Z",
+                "created_at": "2023-01-01T00:00:00Z",
                 "modified_at": "2023-01-01T00:00:00Z",
-                "job_parent_resource_name": "solvers/foo/release/1.2.3",
+                "job_parent_resource_name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.2",
+            },
+            {
+                "uuid": "00000000-1234-5678-1234-123456789012",
+                "name": "A study job",
+                "description": "A description of a study job with many nodes",
+                "workbench": {f"{uuid4()}": n for n in nodes_examples},
+                "created_at": "2023-02-01T00:00:00Z",
+                "modified_at": "2023-02-01T00:00:00Z",
+                "job_parent_resource_name": "studies/96642f2a-a72c-11ef-8776-02420a00087d",
+            },
+            {
+                "uuid": "00000000-0000-5678-1234-123456789012",
+                "name": "A program job",
+                "description": "A description of a program job with a single node",
+                "workbench": {f"{uuid4()}": n for n in nodes_examples[2:3]},
+                "created_at": "2023-03-01T00:00:00Z",
+                "modified_at": "2023-03-01T00:00:00Z",
+                "job_parent_resource_name": "program/simcore%2Fservices%2Fdynamic%2Fjupyter/releases/5.0.2",
             },
         ]
     }
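
The rename from creation_at to created_at has to land in the schema examples in the same commit, because the examples are expected to validate against the model. A hedged sketch of the kind of check that catches a mismatch — the test body is illustrative; the model and module names come from the diff above:

import pytest
from models_library.rpc.webserver.projects import ProjectJobRpcGet


@pytest.mark.parametrize("example", ProjectJobRpcGet.model_json_schema()["examples"])
def test_example_validates(example: dict):
    # would fail with a ValidationError if an example still said `creation_at`
    project = ProjectJobRpcGet.model_validate(example)
    assert project.created_at is not None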

packages/models-library/src/models_library/utils/common_validators.py

Lines changed: 11 additions & 2 deletions

@@ -1,4 +1,4 @@
-""" Reusable validators
+"""Reusable validators
 
 Example:
@@ -22,10 +22,19 @@ class MyModel(BaseModel):
 
 from common_library.json_serialization import json_loads
 from orjson import JSONDecodeError
-from pydantic import BaseModel
+from pydantic import BaseModel, BeforeValidator
 from pydantic.alias_generators import to_camel
 
 
+def trim_string_before(max_length: int) -> BeforeValidator:
+    def _trim(value: str):
+        if isinstance(value, str):
+            return value[:max_length]
+        return value
+
+    return BeforeValidator(_trim)
+
+
 def empty_str_to_none_pre_validator(value: Any):
     if isinstance(value, str) and value.strip() == "":
         return None
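
Usage sketch for the new validator (the model and field names are invented for illustration; the tests below exercise the real thing): because it is a BeforeValidator, the trim runs before any other string validation, so oversized input is shortened rather than rejected.

from typing import Annotated

from models_library.utils.common_validators import trim_string_before
from pydantic import BaseModel


class Comment(BaseModel):  # hypothetical example model
    body: Annotated[str, trim_string_before(max_length=5)]


assert Comment(body="hello world").body == "hello"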

packages/models-library/tests/test_utils_common_validators.py

Lines changed: 67 additions & 1 deletion

@@ -1,13 +1,15 @@
 from enum import Enum
+from typing import Annotated
 
 import pytest
 from models_library.utils.common_validators import (
     create_enums_pre_validator,
     empty_str_to_none_pre_validator,
     none_to_empty_str_pre_validator,
     null_or_none_str_to_none_validator,
+    trim_string_before,
 )
-from pydantic import BaseModel, ValidationError, field_validator
+from pydantic import BaseModel, StringConstraints, ValidationError, field_validator
 
 
 def test_enums_pre_validator():
@@ -89,3 +91,67 @@ class Model(BaseModel):
 
     model = Model.model_validate({"message": ""})
     assert model == Model.model_validate({"message": ""})
+
+
+def test_trim_string_before():
+    max_length = 10
+
+    class ModelWithTrim(BaseModel):
+        text: Annotated[str, trim_string_before(max_length=max_length)]
+
+    # Test with string shorter than max_length
+    short_text = "Short"
+    model = ModelWithTrim(text=short_text)
+    assert model.text == short_text
+
+    # Test with string equal to max_length
+    exact_text = "1234567890"  # 10 characters
+    model = ModelWithTrim(text=exact_text)
+    assert model.text == exact_text
+
+    # Test with string longer than max_length
+    long_text = "This is a very long text that should be trimmed"
+    model = ModelWithTrim(text=long_text)
+    assert model.text == long_text[:max_length]
+    assert len(model.text) == max_length
+
+    # Test with non-string value (should be left unchanged)
+    class ModelWithTrimOptional(BaseModel):
+        text: Annotated[str | None, trim_string_before(max_length=max_length)]
+
+    model = ModelWithTrimOptional(text=None)
+    assert model.text is None
+
+
+def test_trim_string_before_with_string_constraints():
+    max_length = 10
+
+    class ModelWithTrimAndConstraints(BaseModel):
+        text: Annotated[
+            str | None,
+            StringConstraints(
+                max_length=max_length
+            ),  # NOTE: order does not matter for validation but has an effect in the openapi schema
+            trim_string_before(max_length=max_length),
+        ]
+
+    # Check that the OpenAPI schema contains the string constraint
+    schema = ModelWithTrimAndConstraints.model_json_schema()
+    assert schema["properties"]["text"] == {
+        "anyOf": [{"maxLength": max_length, "type": "string"}, {"type": "null"}],
+        "title": "Text",
+    }
+
+    # Test with string longer than max_length
+    # This should pass because trim_string_before runs first and trims the input
+    # before StringConstraints validation happens
+    long_text = "This is a very long text that should be trimmed"
+    model = ModelWithTrimAndConstraints(text=long_text)
+    assert model.text is not None
+    assert model.text == long_text[:max_length]
+    assert len(model.text) == max_length
+
+    # Test with string exactly at max_length
+    exact_text = "1234567890"  # 10 characters
+    model = ModelWithTrimAndConstraints(text=exact_text)
+    assert model.text == exact_text
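
For contrast, a sketch of the behaviour without trim_string_before — StringConstraints alone rejects oversized input with a string_too_long error instead of trimming it (model name invented for illustration):

from typing import Annotated

from pydantic import BaseModel, StringConstraints, ValidationError


class StrictText(BaseModel):  # hypothetical contrast model
    text: Annotated[str, StringConstraints(max_length=10)]


try:
    StrictText(text="This is a very long text that should be trimmed")
except ValidationError as err:
    assert err.errors()[0]["type"] == "string_too_long"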

packages/pytest-simcore/src/pytest_simcore/docker_api_proxy.py

Lines changed: 14 additions & 2 deletions

@@ -1,7 +1,7 @@
 import logging
 
 import pytest
-from aiohttp import ClientSession, ClientTimeout
+from aiohttp import BasicAuth, ClientSession, ClientTimeout
 from pydantic import TypeAdapter
 from settings_library.docker_api_proxy import DockerApiProxysettings
 from tenacity import before_sleep_log, retry, stop_after_delay, wait_fixed
@@ -22,7 +22,13 @@
 async def _wait_till_docker_api_proxy_is_responsive(
     settings: DockerApiProxysettings,
 ) -> None:
-    async with ClientSession(timeout=ClientTimeout(1, 1, 1, 1, 1)) as client:
+    async with ClientSession(
+        timeout=ClientTimeout(total=1),
+        auth=BasicAuth(
+            settings.DOCKER_API_PROXY_USER,
+            settings.DOCKER_API_PROXY_PASSWORD.get_secret_value(),
+        ),
+    ) as client:
         response = await client.get(f"{settings.base_url}/version")
         assert response.status == 200, await response.text()
 
@@ -44,6 +50,12 @@ async def docker_api_proxy_settings(
         {
            "DOCKER_API_PROXY_HOST": get_localhost_ip(),
            "DOCKER_API_PROXY_PORT": published_port,
+           "DOCKER_API_PROXY_USER": env_vars_for_docker_compose[
+               "DOCKER_API_PROXY_USER"
+           ],
+           "DOCKER_API_PROXY_PASSWORD": env_vars_for_docker_compose[
+               "DOCKER_API_PROXY_PASSWORD"
+           ],
         }
     )
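
The old ClientTimeout(1, 1, 1, 1, 1) relied on positional field order; the keyword form plus BasicAuth is what the fixture now sends. A standalone sketch of the same health check, with assumed host and credentials:

import asyncio

from aiohttp import BasicAuth, ClientSession, ClientTimeout


async def check_proxy(base_url: str, user: str, password: str) -> None:
    # aiohttp attaches an Authorization: Basic ... header to every request
    async with ClientSession(
        timeout=ClientTimeout(total=1),
        auth=BasicAuth(user, password),
    ) as client:
        response = await client.get(f"{base_url}/version")
        assert response.status == 200, await response.text()


# assumed local values, matching the .env-devel defaults above
asyncio.run(check_proxy("http://127.0.0.1:8888", "admin", "admin"))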

packages/pytest-simcore/src/pytest_simcore/helpers/webserver_rpc_server.py

Lines changed: 18 additions & 6 deletions

@@ -8,7 +8,10 @@
 from models_library.products import ProductName
 from models_library.projects import ProjectID
 from models_library.rest_pagination import PageOffsetInt
-from models_library.rpc.webserver.projects import PageRpcProjectJobRpcGet
+from models_library.rpc.webserver.projects import (
+    PageRpcProjectJobRpcGet,
+    ProjectJobRpcGet,
+)
 from models_library.rpc_pagination import (
     DEFAULT_NUMBER_OF_ITEMS_PER_PAGE,
     PageLimitInt,
@@ -54,19 +57,28 @@ async def list_projects_marked_as_jobs(
     offset: PageOffsetInt = 0,
     limit: PageLimitInt = DEFAULT_NUMBER_OF_ITEMS_PER_PAGE,
     # filters
-    job_parent_resource_name_filter: str | None = None,
+    job_parent_resource_name_prefix: str | None = None,
 ) -> PageRpcProjectJobRpcGet:
     assert rpc_client
     assert product_name
     assert user_id
 
-    if job_parent_resource_name_filter:
-        assert not job_parent_resource_name_filter.startswith("/")
+    if job_parent_resource_name_prefix:
+        assert not job_parent_resource_name_prefix.startswith("/")
+        assert not job_parent_resource_name_prefix.endswith("%")
+        assert not job_parent_resource_name_prefix.startswith("%")
 
-    items = PageRpcProjectJobRpcGet.model_json_schema()["examples"]
+    items = [
+        item
+        for item in ProjectJobRpcGet.model_json_schema()["examples"]
+        if job_parent_resource_name_prefix is None
+        or item.get("job_parent_resource_name").startswith(
+            job_parent_resource_name_prefix
+        )
+    ]
 
     return PageRpcProjectJobRpcGet.create(
-        items[offset, : offset + limit],
+        items[offset : offset + limit],
         total=len(items),
        limit=limit,
        offset=offset,
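
Besides the prefix filter, this fixes a genuine bug: items[offset, : offset + limit] indexes the list with the tuple (offset, slice(None, offset + limit)), which raises TypeError on a plain list. A quick demonstration of the corrected pagination slice:

items = list(range(10))
offset, limit = 3, 4

# corrected form: a half-open window of at most `limit` items
assert items[offset : offset + limit] == [3, 4, 5, 6]

# old form: tuple index on a list -> TypeError
try:
    items[offset, : offset + limit]  # type: ignore[index]
except TypeError:
    pass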

packages/pytest-simcore/src/pytest_simcore/simcore_services.py

Lines changed: 29 additions & 15 deletions

@@ -9,6 +9,7 @@
 from collections.abc import Iterator
 from dataclasses import dataclass
 from io import StringIO
+from typing import Final
 
 import aiohttp
 import pytest
@@ -27,7 +28,7 @@
 log = logging.getLogger(__name__)
 
 
-_SERVICES_TO_SKIP = {
+_SERVICES_TO_SKIP: Final[set[str]] = {
     "agent",  # global mode deploy (NO exposed ports, has http API)
     "dask-sidecar",  # global mode deploy (NO exposed ports, **NO** http API)
     "migration",
@@ -41,9 +42,8 @@
     "sto-worker-cpu-bound",
 }
 # TODO: unify healthcheck policies see https://github.com/ITISFoundation/osparc-simcore/pull/2281
-SERVICE_PUBLISHED_PORT = {}
-DEFAULT_SERVICE_HEALTHCHECK_ENTRYPOINT = "/v0/"
-MAP_SERVICE_HEALTHCHECK_ENTRYPOINT = {
+DEFAULT_SERVICE_HEALTHCHECK_ENTRYPOINT: Final[str] = "/v0/"
+MAP_SERVICE_HEALTHCHECK_ENTRYPOINT: Final[dict[str, str]] = {
     "autoscaling": "/",
     "clusters-keeper": "/",
     "dask-scheduler": "/health",
@@ -57,16 +57,23 @@
     "resource-usage-tracker": "/",
     "docker-api-proxy": "/version",
 }
-AIOHTTP_BASED_SERVICE_PORT: int = 8080
-FASTAPI_BASED_SERVICE_PORT: int = 8000
-DASK_SCHEDULER_SERVICE_PORT: int = 8787
-DOCKER_API_PROXY_SERVICE_PORT: int = 8888
 
-_SERVICE_NAME_REPLACEMENTS: dict[str, str] = {
+# some services require authentication to access their health-check endpoints
+_BASE_AUTH_ENV_VARS: Final[dict[str, tuple[str, str]]] = {
+    "docker-api-proxy": ("DOCKER_API_PROXY_USER", "DOCKER_API_PROXY_PASSWORD"),
+}
+
+_SERVICE_NAME_REPLACEMENTS: Final[dict[str, str]] = {
     "dynamic-scheduler": "dynamic-schdlr",
 }
 
-_ONE_SEC_TIMEOUT = ClientTimeout(total=1)  # type: ignore
+
+AIOHTTP_BASED_SERVICE_PORT: Final[int] = 8080
+FASTAPI_BASED_SERVICE_PORT: Final[int] = 8000
+DASK_SCHEDULER_SERVICE_PORT: Final[int] = 8787
+DOCKER_API_PROXY_SERVICE_PORT: Final[int] = 8888
+
+_ONE_SEC_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=1)  # type: ignore
 
 
 async def wait_till_service_healthy(service_name: str, endpoint: URL):
@@ -108,13 +115,12 @@ class ServiceHealthcheckEndpoint:
     @classmethod
     def create(cls, service_name: str, baseurl):
         # TODO: unify healthcheck policies see https://github.com/ITISFoundation/osparc-simcore/pull/2281
-        obj = cls(
+        return cls(
            name=service_name,
            url=URL(
                f"{baseurl}{MAP_SERVICE_HEALTHCHECK_ENTRYPOINT.get(service_name, DEFAULT_SERVICE_HEALTHCHECK_ENTRYPOINT)}"
            ),
        )
-        return obj
 
 
 @pytest.fixture(scope="module")
@@ -140,9 +146,17 @@ def services_endpoint(
             DASK_SCHEDULER_SERVICE_PORT,
             DOCKER_API_PROXY_SERVICE_PORT,
         ]
-        endpoint = URL(
-            f"http://{get_localhost_ip()}:{get_service_published_port(full_service_name, target_ports)}"
-        )
+        if service in _BASE_AUTH_ENV_VARS:
+            user_env, password_env = _BASE_AUTH_ENV_VARS[service]
+            user = env_vars_for_docker_compose[user_env]
+            password = env_vars_for_docker_compose[password_env]
+            endpoint = URL(
+                f"http://{user}:{password}@{get_localhost_ip()}:{get_service_published_port(full_service_name, target_ports)}"
+            )
+        else:
+            endpoint = URL(
+                f"http://{get_localhost_ip()}:{get_service_published_port(full_service_name, target_ports)}"
+            )
         services_endpoint[service] = endpoint
     else:
        print(f"Collecting service endpoints: '{service}' skipped")

packages/service-integration/Dockerfile

Lines changed: 10 additions & 9 deletions

@@ -4,8 +4,15 @@
 ARG PYTHON_VERSION="3.11.9"
 ARG UV_VERSION="0.6"
 FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build
-# we docker image is built based on debian
-FROM python:${PYTHON_VERSION}-slim-bookworm AS base
+
+FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64
+# These environment variables are necessary because of https://github.com/astral-sh/uv/issues/6105
+# and until https://gitlab.com/qemu-project/qemu/-/issues/2846 gets fixed
+ENV UV_CONCURRENT_INSTALLS=1
+
+FROM python:${PYTHON_VERSION}-slim-bookworm AS base-amd64
+
+FROM base-${TARGETARCH} AS base
 
 LABEL maintainer=pcrespov
 
@@ -66,13 +73,7 @@ COPY --from=uv_build /uv /uvx /bin/
 # packages may be moved to production image easily by copying the venv
 RUN uv venv "${VIRTUAL_ENV}"
 
-# Set UV_CONCURRENT_INSTALLS=1 for ARM64 to improve build performance
-# These environment variables are necessary because of https://github.com/astral-sh/uv/issues/6105
-# and until https://gitlab.com/qemu-project/qemu/-/issues/2846 gets fixed
-RUN if [ "$TARGETARCH" = "arm64" ]; then \
-    echo "Setting UV_CONCURRENT_INSTALLS=1 for ARM64 architecture"; \
-    export UV_CONCURRENT_INSTALLS=1; \
-    fi
+
 
 RUN --mount=type=cache,target=/root/.cache/uv \
     uv pip install --upgrade \
