Commit a5d8ed7

Merge remote-tracking branch 'origin/is4481/upgrade-libs' into is4481/upgrade-services
2 parents: c138339 + 9ebf506

46 files changed: +284 additions, −194 deletions

packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py

Lines changed: 1 addition & 1 deletion
@@ -175,7 +175,7 @@ def from_task_output(
             msg = f"Could not locate '{output_key}' in {output_data_file}"
             raise ValueError(msg)

-        return cls.parse_obj(data)
+        return cls.model_validate(data)

     model_config = ConfigDict(
         json_schema_extra={

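The only change here is the Pydantic v2 rename parse_obj -> model_validate. A minimal standalone sketch of the new call, using a hypothetical model rather than the repo's TaskOutputData:

from pydantic import BaseModel, ValidationError


class OutputData(BaseModel):  # hypothetical stand-in for the real model
    value: int


data = {"value": 42}
instance = OutputData.model_validate(data)  # v2 replacement for OutputData.parse_obj(data)
assert instance.value == 42

try:
    OutputData.model_validate({"value": "not-a-number"})
except ValidationError as err:
    print(err.error_count(), "validation error(s)")
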
packages/dask-task-models-library/tests/container_tasks/test_io.py

Lines changed: 7 additions & 7 deletions
@@ -30,7 +30,7 @@ def test_io_models_examples(model_cls, model_cls_examples):
     for name, example in model_cls_examples.items():
         print(name, ":", pformat(example))

-        model_instance = model_cls.parse_obj(example)
+        model_instance = model_cls.model_validate(example)

         assert model_instance, f"Failed with {name}"
         print(name, ":", model_instance)
@@ -73,7 +73,7 @@ def test_create_task_output_from_task_with_optional_fields_as_required(
         "examples"
     ]:

-        task_output_schema = TaskOutputDataSchema.parse_obj(schema_example)
+        task_output_schema = TaskOutputDataSchema.model_validate(schema_example)
         outputs_file_name = _create_fake_outputs(
             task_output_schema, tmp_path, optional_fields_set, faker
         )
@@ -94,7 +94,7 @@ def test_create_task_output_from_task_with_optional_fields_as_required(
 def test_create_task_output_from_task_throws_when_there_are_missing_files(
     tmp_path: Path, faker: Faker
 ):
-    task_output_schema = TaskOutputDataSchema.parse_obj(
+    task_output_schema = TaskOutputDataSchema.model_validate(
         {
             "required_file_output": {
                 "required": True,
@@ -115,7 +115,7 @@ def test_create_task_output_from_task_throws_when_there_are_missing_files(
 def test_create_task_output_from_task_does_not_throw_when_there_are_optional_missing_files(
     tmp_path: Path, faker: Faker
 ):
-    task_output_schema = TaskOutputDataSchema.parse_obj(
+    task_output_schema = TaskOutputDataSchema.model_validate(
         {
             "optional_file_output": {
                 "required": False,
@@ -136,7 +136,7 @@ def test_create_task_output_from_task_does_not_throw_when_there_are_optional_mis
 def test_create_task_output_from_task_throws_when_there_are_entries(
     tmp_path: Path, faker: Faker
 ):
-    task_output_schema = TaskOutputDataSchema.parse_obj(
+    task_output_schema = TaskOutputDataSchema.model_validate(
         {
             "some_output": {
                 "required": True,
@@ -155,7 +155,7 @@ def test_create_task_output_from_task_throws_when_there_are_entries(
 def test_create_task_output_from_task_does_not_throw_when_there_are_optional_entries(
     tmp_path: Path, faker: Faker
 ):
-    task_output_schema = TaskOutputDataSchema.parse_obj(
+    task_output_schema = TaskOutputDataSchema.model_validate(
         {
             "some_output": {
                 "required": False,
@@ -184,6 +184,6 @@ def test_objects_are_compatible_with_dask_requirements(model_cls, model_cls_exam
     for name, example in model_cls_examples.items():
         print(name, ":", pformat(example))

-        model_instance = model_cls.parse_obj(example)
+        model_instance = model_cls.model_validate(example)
         reloaded_instance = loads(dumps(model_instance))
         assert reloaded_instance == model_instance

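The tests keep their structure; only parse_obj becomes model_validate. A rough sketch of the underlying pattern of validating declared examples against their own model (hypothetical model and examples, not the repo's fixtures):

from pydantic import BaseModel, ConfigDict


class PortSchema(BaseModel):  # hypothetical example-carrying model
    required: bool

    model_config = ConfigDict(
        json_schema_extra={"examples": [{"required": True}, {"required": False}]}
    )


# validate every declared example, as the tests above do for each model class
for example in PortSchema.model_config["json_schema_extra"]["examples"]:
    instance = PortSchema.model_validate(example)
    assert instance is not None
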
packages/models-library/src/models_library/errors_classes.py

Lines changed: 4 additions & 2 deletions
@@ -1,3 +1,5 @@
+from typing import Any
+
 from pydantic.errors import PydanticErrorMixin


@@ -14,8 +16,8 @@ def __new__(cls, *_args, **_kwargs):
         cls.code = cls._get_full_class_name()  # type: ignore[assignment]
         return super().__new__(cls)

-    def __init__(self, *_args, **kwargs) -> None:
-        self.__dict__ = kwargs
+    def __init__(self, **ctx: Any) -> None:
+        self.__dict__ = ctx
         super().__init__(message=self._build_message(), code=self.code)

     def __str__(self) -> str:

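The constructor now accepts keyword-only context instead of a positional-args/kwargs mix. A simplified sketch of the idea, without the PydanticErrorMixin base; the error base class and message formatting below are illustrative, not the repo's implementation:

from typing import Any


class BaseDomainError(Exception):  # simplified stand-in for the repo's error base
    msg_template: str = "error"

    def __init__(self, **ctx: Any) -> None:
        # keyword-only context is stored on the instance and used to render the message
        self.__dict__ = ctx
        super().__init__(self.msg_template.format(**ctx))


class ProjectNotFoundError(BaseDomainError):
    msg_template = "project {project_id} was not found"


err = ProjectNotFoundError(project_id=123)
assert str(err) == "project 123 was not found"
assert err.project_id == 123
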
packages/models-library/src/models_library/projects_nodes_io.py

Lines changed: 10 additions & 6 deletions
@@ -13,6 +13,7 @@

 from models_library.basic_types import ConstrainedStr, KeyIDStr
 from pydantic import (
+    AfterValidator,
     AnyUrl,
     BaseModel,
     ConfigDict,
@@ -40,8 +41,9 @@
 LocationName = str


-class SimcoreS3FileID(ConstrainedStr):
-    pattern: re.Pattern[str] | None = re.compile(SIMCORE_S3_FILE_ID_RE)
+SimcoreS3FileID: TypeAlias = Annotated[
+    str, StringConstraints(pattern=SIMCORE_S3_FILE_ID_RE)
+]


 class SimcoreS3DirectoryID(ConstrainedStr):
@@ -87,9 +89,7 @@ def from_simcore_s3_object(cls, s3_object: str) -> "SimcoreS3DirectoryID":
         return TypeAdapter(cls).validate_python(f"{parent_path}/")


-class DatCoreFileID(ConstrainedStr):
-    regex: re.Pattern[str] | None = re.compile(DATCORE_FILE_ID_RE)
-
+DatCoreFileID: TypeAlias = Annotated[str, StringConstraints(pattern=DATCORE_FILE_ID_RE)]

 StorageFileID: TypeAlias = SimcoreS3FileID | DatCoreFileID

@@ -123,7 +123,9 @@ class PortLink(BaseModel):
 class DownloadLink(BaseModel):
     """I/O port type to hold a generic download link to a file (e.g. S3 pre-signed link, etc)"""

-    download_link: Annotated[str, AnyUrl] = Field(..., alias="downloadLink")
+    download_link: Annotated[AnyUrl, AfterValidator(str)] = Field(
+        ..., alias="downloadLink"
+    )
     label: str | None = Field(default=None, description="Display name")
     model_config = ConfigDict(
         extra="forbid",
@@ -145,11 +147,13 @@ class BaseFileLink(BaseModel):
     store: LocationID = Field(
         ...,
         description="The store identifier: 0 for simcore S3, 1 for datcore",
+        validate_default=True,
     )

     path: StorageFileID = Field(
         ...,
         description="The path to the file in the storage provider domain",
+        union_mode="left_to_right",
     )

     label: str | None = Field(

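Constrained-string subclasses become Annotated aliases in Pydantic v2, and validation of bare values goes through TypeAdapter. A minimal sketch of both patterns with a made-up pattern and field names (not the real SIMCORE_S3_FILE_ID_RE or models):

from typing import Annotated, TypeAlias

from pydantic import AfterValidator, AnyUrl, BaseModel, StringConstraints, TypeAdapter

# made-up pattern, for illustration only
FileID: TypeAlias = Annotated[str, StringConstraints(pattern=r"^[a-z0-9/_-]+$")]

# plain strings are validated through a TypeAdapter instead of calling the class
file_id = TypeAdapter(FileID).validate_python("api/some_file")


class Link(BaseModel):
    # validate as a URL, then convert back to str so the field value stays a string
    download_link: Annotated[AnyUrl, AfterValidator(str)]


assert isinstance(Link(download_link="https://example.com/f.txt").download_link, str)
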
packages/models-library/src/models_library/utils/nodes.py

Lines changed: 2 additions & 2 deletions
@@ -5,7 +5,7 @@
 from copy import deepcopy
 from typing import Any

-from pydantic import BaseModel
+from pydantic import BaseModel, TypeAdapter

 from ..projects import Project
 from ..projects_nodes_io import NodeID, PortLink, UUIDStr
@@ -20,7 +20,7 @@ def project_node_io_payload_cb(

     async def node_io_payload_cb(node_id: NodeID) -> dict[str, Any]:
         node_io_payload: dict[str, Any] = {"inputs": None, "outputs": None}
-        node = project.workbench.get(UUIDStr(node_id))
+        node = project.workbench.get(TypeAdapter(UUIDStr).validate_python(node_id))
         if node:
             node_io_payload = {"inputs": node.inputs, "outputs": node.outputs}

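Calling a constrained-string type directly (UUIDStr(node_id)) no longer validates in v2; TypeAdapter does. A short sketch with a hypothetical UUID-string alias (the repo's UUIDStr is defined elsewhere):

from typing import Annotated, TypeAlias
from uuid import uuid4

from pydantic import StringConstraints, TypeAdapter

# hypothetical alias, for illustration only
UUIDStr: TypeAlias = Annotated[str, StringConstraints(pattern=r"^[0-9a-fA-F-]{36}$")]

node_id = uuid4()
# validate_python checks the string against the constrained type and returns it
key = TypeAdapter(UUIDStr).validate_python(f"{node_id}")
assert key == str(node_id)
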
packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py

Lines changed: 1 addition & 1 deletion
@@ -83,7 +83,7 @@ async def add(
             [
                 {
                     "project_uuid": f"{self.project_uuid}",
-                    **node.dict(),
+                    **node.model_dump(),
                 }
                 for node in nodes
             ]

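The v1 .dict() call becomes .model_dump(); the result still unpacks into the row dictionaries used for the insert. A tiny sketch with a hypothetical model, not the repo's ProjectNode:

from pydantic import BaseModel


class Node(BaseModel):  # hypothetical stand-in for the repo's node model
    node_id: str
    label: str


nodes = [Node(node_id="n1", label="first"), Node(node_id="n2", label="second")]

rows = [
    {
        "project_uuid": "some-project-uuid",
        **node.model_dump(),  # v2 replacement for **node.dict()
    }
    for node in nodes
]
assert rows[0] == {"project_uuid": "some-project-uuid", "node_id": "n1", "label": "first"}
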
packages/postgres-database/tests/test_utils_projects_nodes.py

Lines changed: 2 additions & 2 deletions
@@ -412,9 +412,9 @@ async def test_get_project_id_from_node_id_raises_if_multiple_projects_with_same
     assert len(project1_nodes) == 1
     project2_nodes = await project2_repo.add(connection, nodes=[shared_node])
     assert len(project2_nodes) == 1
-    assert project1_nodes[0].dict(
+    assert project1_nodes[0].model_dump(
         include=ProjectNodeCreate.get_field_names(exclude={"created", "modified"})
-    ) == project2_nodes[0].dict(
+    ) == project2_nodes[0].model_dump(
         include=ProjectNodeCreate.get_field_names(exclude={"created", "modified"})
     )
     with pytest.raises(ProjectNodesNonUniqueNodeFoundError):

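model_dump accepts the same include/exclude filters as v1's dict(), which is what keeps this comparison working. A small sketch of comparing two instances while ignoring timestamp-like fields (hypothetical model, not ProjectNodeCreate):

from datetime import datetime, timezone

from pydantic import BaseModel


class Record(BaseModel):  # hypothetical model with an audit field
    name: str
    created: datetime


a = Record(name="x", created=datetime(2024, 1, 1, tzinfo=timezone.utc))
b = Record(name="x", created=datetime(2024, 6, 1, tzinfo=timezone.utc))

# compare only the business fields, excluding the audit column
fields = set(Record.model_fields) - {"created"}
assert a.model_dump(include=fields) == b.model_dump(include=fields)
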
packages/pytest-simcore/src/pytest_simcore/docker_registry.py

Lines changed: 2 additions & 2 deletions
@@ -106,9 +106,9 @@ def external_registry_settings(
     if external_envfile_dict:
         config = {
             field: external_envfile_dict.get(field, None)
-            for field in RegistrySettings.__fields__
+            for field in RegistrySettings.model_fields
         }
-        return RegistrySettings.parse_obj(config)
+        return RegistrySettings.model_validate(config)
     return None

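Iterating a settings class now goes through model_fields instead of the deprecated __fields__. A hedged sketch with a hypothetical settings model (the real RegistrySettings lives in the service library, and this stub filters out missing values before validating):

from pydantic import BaseModel


class RegistrySettingsStub(BaseModel):  # hypothetical, mirrors the idea only
    REGISTRY_URL: str = "registry.local"
    REGISTRY_USER: str = "admin"


external_envfile_dict = {"REGISTRY_URL": "registry.example.com"}

# model_fields is a dict of field name -> FieldInfo, so iterating yields the names
config = {
    field: external_envfile_dict.get(field, None)
    for field in RegistrySettingsStub.model_fields
}
settings = RegistrySettingsStub.model_validate(
    {k: v for k, v in config.items() if v is not None}
)
assert settings.REGISTRY_URL == "registry.example.com"
assert settings.REGISTRY_USER == "admin"
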
packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py

Lines changed: 7 additions & 12 deletions
@@ -1,17 +1,15 @@
 from typing import Literal

-from pydantic import BaseModel, Field, root_validator, validator
+from pydantic import field_validator, model_validator, ConfigDict, BaseModel, Field

 from .httpx_calls_capture_errors import OpenApiSpecError


 class CapturedParameterSchema(BaseModel):
-    title: str | None
-    type_: Literal["str", "int", "float", "bool"] | None = Field(
-        None, alias="type", optional=True
-    )
+    title: str | None = None
+    type_: Literal["str", "int", "float", "bool"] | None = Field(None, alias="type")
     pattern: str | None
-    format_: Literal["uuid"] | None = Field(None, alias="format", optional=True)
+    format_: Literal["uuid"] | None = Field(None, alias="format")
     exclusiveMinimum: bool | None
     minimum: int | None
     anyOf: list["CapturedParameterSchema"] | None
@@ -22,7 +20,7 @@ class Config:
         validate_always = True
         allow_population_by_field_name = True

-    @validator("type_", pre=True)
+    @field_validator("type_", mode="before")
     @classmethod
     def preprocess_type_(cls, val):
         if val == "string":
@@ -33,7 +31,7 @@ def preprocess_type_(cls, val):
             val = "bool"
         return val

-    @root_validator(pre=False)
+    @model_validator(mode="after")
     @classmethod
     def check_compatibility(cls, values):
         type_ = values.get("type_")
@@ -100,10 +98,7 @@ class CapturedParameter(BaseModel):
     response_value: str | None = (
         None  # attribute for storing the params value in a concrete response
     )
-
-    class Config:
-        validate_always = True
-        allow_population_by_field_name = True
+    model_config = ConfigDict(validate_default=True, populate_by_name=True)

     def __hash__(self):
         return hash(

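This file collects several v1→v2 renames: validator → field_validator(mode="before"), root_validator → model_validator, and class Config → model_config = ConfigDict(...). A compact sketch of those three pieces on a hypothetical model; note that in v2 an "after" model validator typically receives the validated instance rather than a values dict:

from typing import Literal

from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator


class ParamSchema(BaseModel):  # hypothetical, not the repo's CapturedParameterSchema
    type_: Literal["str", "int"] | None = Field(None, alias="type")
    pattern: str | None = None

    model_config = ConfigDict(populate_by_name=True, validate_default=True)

    @field_validator("type_", mode="before")
    @classmethod
    def _preprocess_type(cls, val):
        # map OpenAPI-style names to the short names used internally
        return {"string": "str", "integer": "int"}.get(val, val)

    @model_validator(mode="after")
    def _check_compatibility(self) -> "ParamSchema":
        # cross-field check runs on the validated instance in v2
        if self.pattern is not None and self.type_ != "str":
            msg = "pattern only makes sense for string parameters"
            raise ValueError(msg)
        return self


schema = ParamSchema.model_validate({"type": "string", "pattern": "^[a-z]+$"})
assert schema.type_ == "str"
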
packages/pytest-simcore/src/pytest_simcore/minio_service.py

Lines changed: 1 addition & 1 deletion
@@ -31,5 +31,5 @@ def minio_s3_settings_envs(
     minio_s3_settings: S3Settings,
     monkeypatch: pytest.MonkeyPatch,
 ) -> EnvVarsDict:
-    changed_envs: EnvVarsDict = minio_s3_settings.dict(exclude_unset=True)
+    changed_envs: EnvVarsDict = minio_s3_settings.model_dump(exclude_unset=True)
     return setenvs_from_dict(monkeypatch, changed_envs)

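exclude_unset keeps only the values that were explicitly provided, which is what lets the fixture export just the overridden environment variables. A brief sketch with a hypothetical settings model, not the repo's S3Settings:

from pydantic import BaseModel


class S3SettingsStub(BaseModel):  # hypothetical stand-in for S3Settings
    S3_ENDPOINT: str = "http://localhost:9000"
    S3_ACCESS_KEY: str = "default-key"


settings = S3SettingsStub(S3_ACCESS_KEY="override-key")

# only the explicitly set field is exported; defaults are left out
assert settings.model_dump(exclude_unset=True) == {"S3_ACCESS_KEY": "override-key"}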