
Commit 8fc60c0

Merge branch 'master' into pr-osparc-fix-devel-mode-notifications
2 parents 988ef18 + 396202b

File tree

72 files changed: +1379 -966 lines changed


packages/aws-library/requirements/_base.txt

Lines changed: 1 addition & 1 deletion
@@ -384,7 +384,7 @@ typer==0.15.2
     # via
     #   -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
     #   -r requirements/../../../packages/settings-library/requirements/_base.in
-types-aiobotocore==2.21.0
+types-aiobotocore==2.21.1
     # via -r requirements/_base.in
 types-aiobotocore-ec2==2.21.0
     # via types-aiobotocore

packages/aws-library/requirements/_test.txt

Lines changed: 3 additions & 3 deletions
@@ -280,17 +280,17 @@ sympy==1.13.3
     # via cfn-lint
 termcolor==2.5.0
     # via pytest-sugar
-types-aioboto3==14.0.0
+types-aioboto3==14.1.0
     # via -r requirements/_test.in
-types-aiobotocore==2.21.0
+types-aiobotocore==2.21.1
     # via
     #   -c requirements/_base.txt
     #   types-aioboto3
 types-awscrt==0.23.10
     # via
     #   -c requirements/_base.txt
     #   botocore-stubs
-types-boto3==1.37.4
+types-boto3==1.38.2
     # via -r requirements/_test.in
 types-s3transfer==0.11.3
     # via

packages/aws-library/src/aws_library/s3/_client.py

Lines changed: 9 additions & 1 deletion
@@ -88,13 +88,21 @@ async def create(
         session_client = None
         exit_stack = contextlib.AsyncExitStack()
         try:
+            config = Config(
+                # This setting tells the S3 client to only calculate checksums when explicitly required
+                # by the operation. This avoids unnecessary checksum calculations for operations that
+                # don't need them, improving performance.
+                # See: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/s3.html#calculating-checksums
+                signature_version="s3v4",
+                request_checksum_calculation="when_required",  # type: ignore[call-arg]
+            )
             session_client = session.client(  # type: ignore[call-overload]
                 "s3",
                 endpoint_url=f"{settings.S3_ENDPOINT}",
                 aws_access_key_id=settings.S3_ACCESS_KEY,
                 aws_secret_access_key=settings.S3_SECRET_KEY,
                 region_name=settings.S3_REGION,
-                config=Config(signature_version="s3v4"),
+                config=config,
             )
             assert isinstance(session_client, ClientCreatorContext)  # nosec
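
For context, a minimal sketch of how this checksum setting could be exercised on its own. This is an assumption-laden example, not code from the repository: the endpoint URL, credentials and bucket name are placeholders, and it assumes botocore >= 1.36 (where request_checksum_calculation was introduced) together with aioboto3.

# Hedged sketch: build the same botocore Config and hand it to an aioboto3 S3 client.
# Endpoint, credentials and bucket below are placeholders, not repository values.
import asyncio

import aioboto3
from botocore.client import Config


async def main() -> None:
    config = Config(
        signature_version="s3v4",
        # Only compute request checksums when the operation explicitly requires them,
        # avoiding unnecessary work on uploads that do not need a checksum.
        request_checksum_calculation="when_required",
    )
    session = aioboto3.Session()
    async with session.client(
        "s3",
        endpoint_url="http://localhost:9000",  # placeholder, e.g. a local MinIO
        aws_access_key_id="test-key",  # placeholder credentials
        aws_secret_access_key="test-secret",
        region_name="us-east-1",
        config=config,
    ) as client:
        await client.put_object(Bucket="some-bucket", Key="hello.txt", Body=b"hi")


if __name__ == "__main__":
    asyncio.run(main())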

Lines changed: 44 additions & 0 deletions
@@ -0,0 +1,44 @@
+from typing import Final
+from uuid import uuid4
+
+from models_library.projects import ProjectID
+from models_library.projects_nodes_io import NodeID
+from models_library.services_types import ServiceKey, ServiceVersion
+from models_library.users import UserID
+from pydantic import TypeAdapter
+
+from ..models import DaskJobID
+
+
+def generate_dask_job_id(
+    service_key: ServiceKey,
+    service_version: ServiceVersion,
+    user_id: UserID,
+    project_id: ProjectID,
+    node_id: NodeID,
+) -> DaskJobID:
+    """Creates a dask job id.
+
+    The job ID contains the user_id, project_id and node_id, must be unique,
+    and is shown in the Dask scheduler dashboard website.
+    """
+    return DaskJobID(
+        f"{service_key}:{service_version}:userid_{user_id}:projectid_{project_id}:nodeid_{node_id}:uuid_{uuid4()}"
+    )
+
+
+_JOB_ID_PARTS: Final[int] = 6
+
+
+def parse_dask_job_id(
+    job_id: str,
+) -> tuple[ServiceKey, ServiceVersion, UserID, ProjectID, NodeID]:
+    parts = job_id.split(":")
+    assert len(parts) == _JOB_ID_PARTS  # nosec
+    return (
+        parts[0],
+        parts[1],
+        TypeAdapter(UserID).validate_python(parts[2][len("userid_") :]),
+        ProjectID(parts[3][len("projectid_") :]),
+        NodeID(parts[4][len("nodeid_") :]),
+    )
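
As a quick illustration of the two helpers above, here is a hedged round-trip sketch; the service key, version and IDs are made up, and the import path is the one used by the test added in this commit.

# Hypothetical usage of generate_dask_job_id / parse_dask_job_id (values are made up).
from uuid import UUID

from dask_task_models_library.container_tasks.utils import (
    generate_dask_job_id,
    parse_dask_job_id,
)

job_id = generate_dask_job_id(
    service_key="simcore/services/comp/sleeper",
    service_version="2.0.0",
    user_id=42,
    project_id=UUID("11111111-1111-1111-1111-111111111111"),
    node_id=UUID("22222222-2222-2222-2222-222222222222"),
)
# -> "simcore/services/comp/sleeper:2.0.0:userid_42:projectid_1111...:nodeid_2222...:uuid_<random>"

key, version, user_id, project_id, node_id = parse_dask_job_id(job_id)
assert (key, version, user_id) == ("simcore/services/comp/sleeper", "2.0.0", 42)
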
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+from typing import TypeAlias
+
+DaskJobID: TypeAlias = str
+DaskResources: TypeAlias = dict[str, int | float]
Lines changed: 68 additions & 0 deletions
@@ -0,0 +1,68 @@
+# pylint: disable=too-many-positional-arguments
+# pylint:disable=redefined-outer-name
+# pylint:disable=too-many-arguments
+# pylint:disable=unused-argument
+# pylint:disable=unused-variable
+
+import pytest
+from dask_task_models_library.container_tasks.utils import (
+    generate_dask_job_id,
+    parse_dask_job_id,
+)
+from faker import Faker
+from models_library.projects import ProjectID
+from models_library.projects_nodes_io import NodeID
+from models_library.services_types import ServiceKey, ServiceVersion
+from models_library.users import UserID
+from pydantic import TypeAdapter
+
+
+@pytest.fixture(
+    params=["simcore/service/comp/some/fake/service/key", "dockerhub-style/service_key"]
+)
+def service_key(request) -> ServiceKey:
+    return request.param
+
+
+@pytest.fixture()
+def service_version() -> str:
+    return "1234.32432.2344"
+
+
+@pytest.fixture
+def user_id(faker: Faker) -> UserID:
+    return TypeAdapter(UserID).validate_python(faker.pyint(min_value=1))
+
+
+@pytest.fixture
+def project_id(faker: Faker) -> ProjectID:
+    return ProjectID(faker.uuid4())
+
+
+@pytest.fixture
+def node_id(faker: Faker) -> NodeID:
+    return NodeID(faker.uuid4())
+
+
+def test_dask_job_id_serialization(
+    service_key: ServiceKey,
+    service_version: ServiceVersion,
+    user_id: UserID,
+    project_id: ProjectID,
+    node_id: NodeID,
+):
+    dask_job_id = generate_dask_job_id(
+        service_key, service_version, user_id, project_id, node_id
+    )
+    (
+        parsed_service_key,
+        parsed_service_version,
+        parsed_user_id,
+        parsed_project_id,
+        parsed_node_id,
+    ) = parse_dask_job_id(dask_job_id)
+    assert service_key == parsed_service_key
+    assert service_version == parsed_service_version
+    assert user_id == parsed_user_id
+    assert project_id == parsed_project_id
+    assert node_id == parsed_node_id

packages/models-library/src/models_library/projects.py

Lines changed: 2 additions & 2 deletions
@@ -113,8 +113,8 @@ class ProjectAtDB(BaseProjectModel):

     published: Annotated[
         bool | None,
-        Field(default=False, description="Defines if a study is available publicly"),
-    ]
+        Field(description="Defines if a study is available publicly"),
+    ] = False

     @field_validator("project_type", mode="before")
     @classmethod
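
For readers unfamiliar with this Pydantic v2 idiom, here is a minimal illustrative sketch with a made-up model name: the field metadata stays inside Annotated, while the default moves onto the assignment, which behaves the same at validation time but keeps the shared annotation free of the default value.

# Illustrative only (made-up model); shows the Annotated-with-assignment-default pattern.
from typing import Annotated

from pydantic import BaseModel, Field


class StudyExample(BaseModel):
    published: Annotated[
        bool | None,
        Field(description="Defines if a study is available publicly"),
    ] = False  # default lives on the assignment, not inside Field(...)


assert StudyExample().published is False
assert StudyExample(published=None).published is None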
