diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 72247efd..43dd59b9 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -22,8 +22,6 @@ updates: ignore: # https://github.com/sphinx-contrib/sphinxcontrib-towncrier/issues/92 - dependency-name: towncrier - # https://github.com/pydantic/pydantic/issues/10964 - - dependency-name: pydantic # https://til.simonwillison.net/github/dependabot-python-setup groups: python-packages: diff --git a/data_rentgen/consumer/openlineage/run.py b/data_rentgen/consumer/openlineage/run.py index 883f1b64..9dd2e3bf 100644 --- a/data_rentgen/consumer/openlineage/run.py +++ b/data_rentgen/consumer/openlineage/run.py @@ -5,7 +5,7 @@ from data_rentgen.consumer.openlineage.base import OpenLineageBase from data_rentgen.consumer.openlineage.run_facets import OpenLineageRunFacets -from data_rentgen.utils import UUID +from data_rentgen.utils import UUIDv6Plus class OpenLineageRun(OpenLineageBase): @@ -13,5 +13,5 @@ class OpenLineageRun(OpenLineageBase): See [Run](https://github.com/OpenLineage/OpenLineage/blob/main/spec/OpenLineage.json). """ - runId: UUID + runId: UUIDv6Plus facets: OpenLineageRunFacets = Field(default_factory=OpenLineageRunFacets) diff --git a/data_rentgen/consumer/openlineage/run_facets/parent_run.py b/data_rentgen/consumer/openlineage/run_facets/parent_run.py index 8f25ecfb..5ae0a6a6 100644 --- a/data_rentgen/consumer/openlineage/run_facets/parent_run.py +++ b/data_rentgen/consumer/openlineage/run_facets/parent_run.py @@ -3,7 +3,7 @@ from data_rentgen.consumer.openlineage.base import OpenLineageBase from data_rentgen.consumer.openlineage.run_facets.base import OpenLineageRunFacet -from data_rentgen.utils import UUID +from data_rentgen.utils import UUIDv6Plus class OpenLineageParentJob(OpenLineageBase): @@ -20,7 +20,7 @@ class OpenLineageParentRun(OpenLineageBase): See [ParentRunFacet](https://github.com/OpenLineage/OpenLineage/blob/main/spec/facets/ParentRunFacet.json). 
""" - runId: UUID + runId: UUIDv6Plus class OpenLineageParentRunFacet(OpenLineageRunFacet): diff --git a/data_rentgen/db/repositories/operation.py b/data_rentgen/db/repositories/operation.py index 644c4432..32376f1f 100644 --- a/data_rentgen/db/repositories/operation.py +++ b/data_rentgen/db/repositories/operation.py @@ -3,6 +3,7 @@ from datetime import datetime, timezone from typing import Sequence +from uuid import UUID from sqlalchemy import Row, any_, func, select from sqlalchemy.dialects.postgresql import insert @@ -11,7 +12,6 @@ from data_rentgen.db.repositories.base import Repository from data_rentgen.db.utils.uuid import extract_timestamp_from_uuid from data_rentgen.dto import OperationDTO, PaginationDTO -from data_rentgen.utils import UUID class OperationRepository(Repository[Operation]): diff --git a/data_rentgen/server/providers/auth/dummy_provider.py b/data_rentgen/server/providers/auth/dummy_provider.py index 5969bcb2..34c6e86a 100644 --- a/data_rentgen/server/providers/auth/dummy_provider.py +++ b/data_rentgen/server/providers/auth/dummy_provider.py @@ -31,7 +31,9 @@ def __init__( @classmethod def setup(cls, app: FastAPI) -> FastAPI: - settings = DummyAuthProviderSettings.model_validate(app.state.settings.auth.dict(exclude={"provider"})) + settings = DummyAuthProviderSettings.model_validate( + app.state.settings.auth.model_dump(exclude={"provider"}), + ) logger.info("Using %s provider with settings:\n%s", cls.__name__, pformat(settings)) app.dependency_overrides[AuthProvider] = cls app.dependency_overrides[DummyAuthProviderSettings] = lambda: settings diff --git a/data_rentgen/server/providers/auth/keycloak_provider.py b/data_rentgen/server/providers/auth/keycloak_provider.py index d35d42dd..9b015690 100644 --- a/data_rentgen/server/providers/auth/keycloak_provider.py +++ b/data_rentgen/server/providers/auth/keycloak_provider.py @@ -38,7 +38,7 @@ def __init__( @classmethod def setup(cls, app: FastAPI) -> FastAPI: settings = 
KeycloakAuthProviderSettings.model_validate( - app.state.settings.auth.dict(exclude={"provider"}), + app.state.settings.auth.model_dump(exclude={"provider"}), ) logger.info("Using %s provider with settings:\n%s", cls.__name__, settings) app.dependency_overrides[AuthProvider] = cls diff --git a/data_rentgen/server/schemas/v1/__init__.py b/data_rentgen/server/schemas/v1/__init__.py index f11aae9e..b6e5ac56 100644 --- a/data_rentgen/server/schemas/v1/__init__.py +++ b/data_rentgen/server/schemas/v1/__init__.py @@ -18,8 +18,12 @@ LineageEntityKindV1, LineageEntityV1, LineageInputRelationV1, + LineageIORelationSchemaFieldV1, + LineageIORelationSchemaV1, + LineageNodesResponseV1, LineageOutputRelationV1, LineageParentRelationV1, + LineageRelationsResponseV1, LineageResponseV1, LineageSymlinkRelationV1, OperationLineageQueryV1, @@ -68,12 +72,16 @@ "DatasetPaginateQueryV1", "DatasetResponseV1", "LineageDirectionV1", - "LineageEntityV1", "LineageEntityKindV1", - "LineageResponseV1", + "LineageEntityV1", "LineageInputRelationV1", + "LineageIORelationSchemaFieldV1", + "LineageIORelationSchemaV1", + "LineageNodesResponseV1", "LineageOutputRelationV1", "LineageParentRelationV1", + "LineageRelationsResponseV1", + "LineageResponseV1", "LineageSymlinkRelationV1", "LocationDetailedResponseV1", "LocationPaginateQueryV1", diff --git a/data_rentgen/server/schemas/v1/dataset.py b/data_rentgen/server/schemas/v1/dataset.py index cb2910ba..eb11ecde 100644 --- a/data_rentgen/server/schemas/v1/dataset.py +++ b/data_rentgen/server/schemas/v1/dataset.py @@ -2,8 +2,6 @@ # SPDX-License-Identifier: Apache-2.0 from __future__ import annotations -from typing import Literal - from fastapi import Query from pydantic import BaseModel, ConfigDict, Field @@ -12,8 +10,7 @@ class DatasetResponseV1(BaseModel): - kind: Literal["DATASET"] = "DATASET" - id: int = Field(description="Dataset id") + id: str = Field(description="Dataset id", coerce_numbers_to_str=True) location: LocationResponseV1 = 
Field(description="Corresponding Location") name: str = Field(description="Dataset name") format: str | None = Field(description="Data format", default=None) diff --git a/data_rentgen/server/schemas/v1/job.py b/data_rentgen/server/schemas/v1/job.py index 22e472c2..f3ba4e4e 100644 --- a/data_rentgen/server/schemas/v1/job.py +++ b/data_rentgen/server/schemas/v1/job.py @@ -1,6 +1,5 @@ # SPDX-FileCopyrightText: 2024-2025 MTS PJSC # SPDX-License-Identifier: Apache-2.0 -from typing import Literal from fastapi import Query from pydantic import BaseModel, ConfigDict, Field @@ -12,8 +11,7 @@ class JobResponseV1(BaseModel): """Job response""" - kind: Literal["JOB"] = "JOB" - id: int = Field(description="Job id") + id: str = Field(description="Job id", coerce_numbers_to_str=True) location: LocationResponseV1 = Field(description="Corresponding Location") name: str = Field(description="Job name") type: str = Field(description="Job type") diff --git a/data_rentgen/server/schemas/v1/lineage.py b/data_rentgen/server/schemas/v1/lineage.py index 7e6f0df7..a4e43703 100644 --- a/data_rentgen/server/schemas/v1/lineage.py +++ b/data_rentgen/server/schemas/v1/lineage.py @@ -3,6 +3,7 @@ from datetime import datetime from enum import Enum from typing import Literal +from uuid import UUID from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator @@ -10,7 +11,7 @@ from data_rentgen.server.schemas.v1.job import JobResponseV1 from data_rentgen.server.schemas.v1.operation import OperationResponseV1 from data_rentgen.server.schemas.v1.run import RunResponseV1 -from data_rentgen.utils import UUID +from data_rentgen.utils import UUIDv6Plus class LineageEntityKindV1(str, Enum): @@ -34,9 +35,9 @@ def __str__(self) -> str: class LineageEntityV1(BaseModel): kind: LineageEntityKindV1 = Field(description="Type of Lineage entity") - id: int | UUID = Field(description="Id of Lineage entity") + id: str | UUID = Field(description="Id of Lineage entity") - model_config = 
ConfigDict(from_attributes=True, use_enum_values=True) + model_config = ConfigDict(from_attributes=True) class BaseLineageQueryV1(BaseModel): @@ -94,11 +95,11 @@ class JobLineageQueryV1(BaseLineageQueryV1): class OperationLineageQueryV1(BaseLineageQueryV1): - start_node_id: UUID = Field(description="Operation id", examples=["00000000-0000-0000-0000-000000000000"]) + start_node_id: UUIDv6Plus = Field(description="Operation id", examples=["00000000-0000-0000-0000-000000000000"]) class RunLineageQueryV1(BaseLineageQueryV1): - start_node_id: UUID = Field(description="Run id", examples=["00000000-0000-0000-0000-000000000000"]) + start_node_id: UUIDv6Plus = Field(description="Run id", examples=["00000000-0000-0000-0000-000000000000"]) granularity: Literal["OPERATION", "RUN"] = Field( default="RUN", description="Granularity of the run lineage", @@ -107,36 +108,34 @@ class RunLineageQueryV1(BaseLineageQueryV1): class LineageParentRelationV1(BaseModel): - kind: Literal["PARENT"] = "PARENT" from_: LineageEntityV1 = Field(description="Start point of relation", serialization_alias="from") to: LineageEntityV1 = Field(description="End point of relation") -class LineageOutputRelationSchemaFieldV1(BaseModel): +class LineageIORelationSchemaFieldV1(BaseModel): name: str type: str | None = Field(default=None) description: str | None = Field(default=None) - fields: list["LineageOutputRelationSchemaFieldV1"] = Field(description="Nested fields", default_factory=list) + fields: list["LineageIORelationSchemaFieldV1"] = Field(description="Nested fields", default_factory=list) model_config = ConfigDict(from_attributes=True) -class LineageOutputRelationSchemaV1(BaseModel): - id: int = Field(description="Schema id") - fields: list[LineageOutputRelationSchemaFieldV1] = Field(description="Schema fields") +class LineageIORelationSchemaV1(BaseModel): + id: str = Field(description="Schema id", coerce_numbers_to_str=True) + fields: list[LineageIORelationSchemaFieldV1] = Field(description="Schema 
fields") model_config = ConfigDict(from_attributes=True) class LineageInputRelationV1(BaseModel): - kind: Literal["INPUT"] = "INPUT" from_: LineageEntityV1 = Field(description="Start point of relation", serialization_alias="from") to: LineageEntityV1 = Field(description="End point of relation") last_interaction_at: datetime = Field(description="Last interaction at", examples=["2008-09-15T15:53:00+05:00"]) num_bytes: int | None = Field(description="Number of bytes", examples=[42], default=None) num_rows: int | None = Field(description="Number of rows", examples=[42], default=None) num_files: int | None = Field(description="Number of files", examples=[42], default=None) - i_schema: LineageOutputRelationSchemaV1 | None = Field( + i_schema: LineageIORelationSchemaV1 | None = Field( description="Schema", default=None, # pydantic models have reserved "schema" attribute, using alias @@ -145,7 +144,6 @@ class LineageInputRelationV1(BaseModel): class LineageOutputRelationV1(BaseModel): - kind: Literal["OUTPUT"] = "OUTPUT" from_: LineageEntityV1 = Field(description="Start point of relation", serialization_alias="from") to: LineageEntityV1 = Field(description="End point of relation") type: str | None = Field(description="Type of relation", examples=["CREATE", "APPEND"], default=None) @@ -153,7 +151,7 @@ class LineageOutputRelationV1(BaseModel): num_bytes: int | None = Field(description="Number of bytes", examples=[42], default=None) num_rows: int | None = Field(description="Number of rows", examples=[42], default=None) num_files: int | None = Field(description="Number of files", examples=[42], default=None) - o_schema: LineageOutputRelationSchemaV1 | None = Field( + o_schema: LineageIORelationSchemaV1 | None = Field( description="Schema", default=None, # pydantic models have reserved "schema" attribute, using alias @@ -162,17 +160,28 @@ class LineageOutputRelationV1(BaseModel): class LineageSymlinkRelationV1(BaseModel): - kind: Literal["SYMLINK"] = "SYMLINK" from_: 
LineageEntityV1 = Field(description="Start point of relation", serialization_alias="from") to: LineageEntityV1 = Field(description="End point of relation") type: str = Field(description="Type of relation between datasets", examples=["METASTORE", "WAREHOUSE"]) +class LineageRelationsResponseV1(BaseModel): + parents: list[LineageParentRelationV1] = Field(description="Parent relations", default_factory=list) + symlinks: list[LineageSymlinkRelationV1] = Field(description="Symlink relations", default_factory=list) + inputs: list[LineageInputRelationV1] = Field(description="Input relations", default_factory=list) + outputs: list[LineageOutputRelationV1] = Field(description="Output relations", default_factory=list) + + +class LineageNodesResponseV1(BaseModel): + datasets: dict[str, DatasetResponseV1] = Field(description="Dataset nodes", default_factory=dict) + jobs: dict[str, JobResponseV1] = Field(description="Job nodes", default_factory=dict) + runs: dict[UUID, RunResponseV1] = Field(description="Run nodes", default_factory=dict) + operations: dict[UUID, OperationResponseV1] = Field(description="Operation nodes", default_factory=dict) + + class LineageResponseV1(BaseModel): - relations: list[ - LineageParentRelationV1 | LineageInputRelationV1 | LineageOutputRelationV1 | LineageSymlinkRelationV1 - ] = Field(description="List of relations", default_factory=list) - nodes: list[RunResponseV1 | OperationResponseV1 | JobResponseV1 | DatasetResponseV1] = Field( - description="List of nodes", - default_factory=list, + relations: LineageRelationsResponseV1 = Field( + description="Lineage relations", + default_factory=LineageRelationsResponseV1, ) + nodes: LineageNodesResponseV1 = Field(description="Lineage nodes", default_factory=LineageNodesResponseV1) diff --git a/data_rentgen/server/schemas/v1/location.py b/data_rentgen/server/schemas/v1/location.py index 02b52038..57dc65e6 100644 --- a/data_rentgen/server/schemas/v1/location.py +++ b/data_rentgen/server/schemas/v1/location.py 
@@ -8,7 +8,7 @@ class LocationResponseV1(BaseModel): - id: int = Field(description="Location id") + id: str = Field(description="Location id", coerce_numbers_to_str=True) type: str = Field(description="Location type, e.g kafka, hdfs, postgres") name: str = Field(description="Location name, e.g. cluster name") addresses: list[AddressResponseV1] = Field(description="List of addresses") diff --git a/data_rentgen/server/schemas/v1/operation.py b/data_rentgen/server/schemas/v1/operation.py index 5b0a53cf..fe75c46b 100644 --- a/data_rentgen/server/schemas/v1/operation.py +++ b/data_rentgen/server/schemas/v1/operation.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 from datetime import datetime from enum import IntEnum -from typing import Literal +from uuid import UUID from fastapi import Query from pydantic import ( @@ -16,7 +16,7 @@ ) from data_rentgen.server.schemas.v1.pagination import PaginateQueryV1 -from data_rentgen.utils import UUID +from data_rentgen.utils import UUIDv6Plus class OperationStatusV1(IntEnum): @@ -42,7 +42,6 @@ def __str__(self) -> str: class OperationResponseV1(BaseModel): """Operation response.""" - kind: Literal["OPERATION"] = "OPERATION" id: UUID = Field(description="Operation id") created_at: datetime = Field(description="Operation creation time") run_id: UUID = Field(description="Run operation belongs to") @@ -112,13 +111,13 @@ class OperationQueryV1(PaginateQueryV1): examples=["2008-09-15T15:53:00+05:00"], ), ) - operation_id: list[UUID] = Field( + operation_id: list[UUIDv6Plus] = Field( Query( default_factory=list, description="Operation ids, for exact match", ), ) - run_id: UUID | None = Field( + run_id: UUIDv6Plus | None = Field( Query( default=None, description="Run id, can be used only with 'since'", diff --git a/data_rentgen/server/schemas/v1/run.py b/data_rentgen/server/schemas/v1/run.py index c07a8979..87382995 100644 --- a/data_rentgen/server/schemas/v1/run.py +++ b/data_rentgen/server/schemas/v1/run.py @@ -2,7 +2,7 @@ # 
SPDX-License-Identifier: Apache-2.0 from datetime import datetime from enum import IntEnum -from typing import Literal +from uuid import UUID from fastapi import Query from pydantic import ( @@ -17,7 +17,7 @@ from data_rentgen.server.schemas.v1.pagination import PaginateQueryV1 from data_rentgen.server.schemas.v1.user import UserResponseV1 -from data_rentgen.utils import UUID +from data_rentgen.utils import UUIDv6Plus class RunStatusV1(IntEnum): @@ -43,10 +43,9 @@ def __str__(self) -> str: class RunResponseV1(BaseModel): """Run response""" - kind: Literal["RUN"] = "RUN" id: UUID = Field(description="Run id") created_at: datetime = Field(description="Run creation time") - job_id: int = Field(description="Job the run is associated with") + job_id: str = Field(description="Job the run is associated with", coerce_numbers_to_str=True) parent_run_id: UUID | None = Field(description="Parent of current run", default=None) status: RunStatusV1 = Field(description="Run status") external_id: str | None = Field(description="External id, e.g. 
Spark applicationid", default=None) @@ -127,7 +126,7 @@ class RunsQueryV1(PaginateQueryV1): examples=["2008-09-15T15:53:00+05:00"], ), ) - run_id: list[UUID] = Field( + run_id: list[UUIDv6Plus] = Field( Query( default_factory=list, description="Run ids, for exact match", @@ -140,7 +139,7 @@ class RunsQueryV1(PaginateQueryV1): ), ) - parent_run_id: UUID | None = Field( + parent_run_id: UUIDv6Plus | None = Field( Query( default=None, description="Parent run id, can be used only with 'since' and 'until'", diff --git a/data_rentgen/server/settings/auth/__init__.py b/data_rentgen/server/settings/auth/__init__.py index 60a0bac8..e6ea8cfc 100644 --- a/data_rentgen/server/settings/auth/__init__.py +++ b/data_rentgen/server/settings/auth/__init__.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: 2024-2025 MTS PJSC # SPDX-License-Identifier: Apache-2.0 -from pydantic import BaseModel, Field, ImportString +from pydantic import BaseModel, ConfigDict, Field, ImportString class AuthSettings(BaseModel): @@ -25,5 +25,4 @@ class AuthSettings(BaseModel): validate_default=True, ) - class Config: - extra = "allow" + model_config = ConfigDict(extra="allow") diff --git a/data_rentgen/server/settings/session.py b/data_rentgen/server/settings/session.py index c3c368c2..88b3c2e6 100644 --- a/data_rentgen/server/settings/session.py +++ b/data_rentgen/server/settings/session.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: 2024-2025 MTS PJSC # SPDX-License-Identifier: Apache-2.0 -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field DEFAULT_MAX_AGE = 1_209_600 @@ -55,5 +55,4 @@ class SessionSettings(BaseModel): description="Domain for sharing cookies between subdomains or cross-domains.", ) - class Config: - extra = "allow" + model_config = ConfigDict(extra="allow") diff --git a/data_rentgen/server/utils/lineage_response.py b/data_rentgen/server/utils/lineage_response.py index b60a2e89..e7904550 100644 --- a/data_rentgen/server/utils/lineage_response.py +++ 
b/data_rentgen/server/utils/lineage_response.py @@ -1,14 +1,22 @@ # SPDX-FileCopyrightText: 2024-2025 MTS PJSC # SPDX-License-Identifier: Apache-2.0 -from typing import Iterable +from typing import Any +from uuid6 import UUID + +from data_rentgen.db.models.dataset_symlink import DatasetSymlink +from data_rentgen.db.models.input import Input +from data_rentgen.db.models.operation import Operation +from data_rentgen.db.models.output import Output +from data_rentgen.db.models.run import Run from data_rentgen.server.schemas.v1 import ( DatasetResponseV1, JobResponseV1, LineageEntityKindV1, LineageEntityV1, LineageInputRelationV1, + LineageNodesResponseV1, LineageOutputRelationV1, LineageParentRelationV1, LineageResponseV1, @@ -16,101 +24,116 @@ OperationResponseV1, RunResponseV1, ) +from data_rentgen.server.schemas.v1.lineage import ( + LineageIORelationSchemaV1, + LineageRelationsResponseV1, +) from data_rentgen.server.services.lineage import LineageServiceResult async def build_lineage_response(lineage: LineageServiceResult) -> LineageResponseV1: - response = LineageResponseV1() - for job_id in sorted(lineage.jobs): - job = lineage.jobs[job_id] - response.nodes.append(JobResponseV1.model_validate(job)) - - for dataset_id in sorted(lineage.datasets): - dataset = lineage.datasets[dataset_id] - response.nodes.append(DatasetResponseV1.model_validate(dataset)) - - for run_id in sorted(lineage.runs): - run = lineage.runs[run_id] - response.relations.append( - LineageParentRelationV1( - from_=LineageEntityV1(kind=LineageEntityKindV1.JOB, id=run.job_id), - to=LineageEntityV1(kind=LineageEntityKindV1.RUN, id=run.id), - ), + datasets = {str(dataset.id): DatasetResponseV1.model_validate(dataset) for dataset in lineage.datasets.values()} + jobs = {str(job.id): JobResponseV1.model_validate(job) for job in lineage.jobs.values()} + runs = {run.id: RunResponseV1.model_validate(run) for run in lineage.runs.values()} + operations = {op.id: OperationResponseV1.model_validate(op) for op 
in lineage.operations.values()} + + return LineageResponseV1( + nodes=LineageNodesResponseV1( + jobs=jobs, + datasets=datasets, + runs=runs, # type: ignore[assignment] + operations=operations, # type: ignore[assignment] + ), + relations=LineageRelationsResponseV1( + parents=_get_run_parent_relations(lineage.runs) + _get_operation_parent_relations(lineage.operations), + symlinks=_get_symlink_relations(lineage.dataset_symlinks), + inputs=_get_input_relations(lineage.inputs), + outputs=_get_output_relations(lineage.outputs), + ), + ) + + +def _get_run_parent_relations(runs: dict[UUID, Run]) -> list[LineageParentRelationV1]: + parents = [] + for run_id in sorted(runs): + run = runs[run_id] + relation = LineageParentRelationV1( + from_=LineageEntityV1(kind=LineageEntityKindV1.JOB, id=str(run.job_id)), + to=LineageEntityV1(kind=LineageEntityKindV1.RUN, id=run.id), ) - response.nodes.append(RunResponseV1.model_validate(run)) - - for operation_id in sorted(lineage.operations): - operation = lineage.operations[operation_id] - response.relations.append( - LineageParentRelationV1( - from_=LineageEntityV1(kind=LineageEntityKindV1.RUN, id=operation.run_id), - to=LineageEntityV1(kind=LineageEntityKindV1.OPERATION, id=operation.id), - ), + parents.append(relation) + return parents + + +def _get_operation_parent_relations(operations: dict[UUID, Operation]) -> list[LineageParentRelationV1]: + parents = [] + for operation_id in sorted(operations): + operation = operations[operation_id] + relation = LineageParentRelationV1( + from_=LineageEntityV1(kind=LineageEntityKindV1.RUN, id=operation.run_id), + to=LineageEntityV1(kind=LineageEntityKindV1.OPERATION, id=operation.id), ) - response.nodes.append(OperationResponseV1.model_validate(operation)) + parents.append(relation) + return parents + - for symlink_id in sorted(lineage.dataset_symlinks): - dataset_symlink = lineage.dataset_symlinks[symlink_id] +def _get_symlink_relations(dataset_symlinks: dict[Any, DatasetSymlink]) -> 
list[LineageSymlinkRelationV1]: + symlinks = [] + for key in sorted(dataset_symlinks): + dataset_symlink = dataset_symlinks[key] relation = LineageSymlinkRelationV1( type=dataset_symlink.type, - from_=LineageEntityV1(kind=LineageEntityKindV1.DATASET, id=dataset_symlink.from_dataset_id), - to=LineageEntityV1(kind=LineageEntityKindV1.DATASET, id=dataset_symlink.to_dataset_id), + from_=LineageEntityV1(kind=LineageEntityKindV1.DATASET, id=str(dataset_symlink.from_dataset_id)), + to=LineageEntityV1(kind=LineageEntityKindV1.DATASET, id=str(dataset_symlink.to_dataset_id)), ) - response.relations.append(relation) + symlinks.append(relation) + return symlinks - input_relations = await _add_input_relations(lineage.inputs.values()) - response.relations.extend(input_relations) - output_relations = await _add_output_relations(lineage.outputs.values()) - response.relations.extend(output_relations) - - return response - - -async def _add_input_relations( - inputs: Iterable, -) -> list[LineageInputRelationV1]: +def _get_input_relations(inputs: dict[Any, Input]) -> list[LineageInputRelationV1]: relations = [] - for input in inputs: + for input in inputs.values(): if input.operation_id is not None: to = LineageEntityV1(kind=LineageEntityKindV1.OPERATION, id=input.operation_id) elif input.run_id is not None: to = LineageEntityV1(kind=LineageEntityKindV1.RUN, id=input.run_id) elif input.job_id is not None: - to = LineageEntityV1(kind=LineageEntityKindV1.JOB, id=input.job_id) + to = LineageEntityV1(kind=LineageEntityKindV1.JOB, id=str(input.job_id)) + relation = LineageInputRelationV1( - from_=LineageEntityV1(kind=LineageEntityKindV1.DATASET, id=input.dataset_id), + from_=LineageEntityV1(kind=LineageEntityKindV1.DATASET, id=str(input.dataset_id)), to=to, last_interaction_at=input.created_at, num_bytes=input.num_bytes, num_rows=input.num_rows, num_files=input.num_files, - i_schema=input.schema, + i_schema=LineageIORelationSchemaV1.model_validate(input.schema) if input.schema else None, 
) relations.append(relation) + return sorted(relations, key=lambda x: (x.from_.id, x.to.id)) -async def _add_output_relations( - outputs: Iterable, -) -> list[LineageOutputRelationV1]: +def _get_output_relations(outputs: dict[Any, Output]) -> list[LineageOutputRelationV1]: relations = [] - for output in outputs: + for output in outputs.values(): if output.operation_id is not None: from_ = LineageEntityV1(kind=LineageEntityKindV1.OPERATION, id=output.operation_id) elif output.run_id is not None: from_ = LineageEntityV1(kind=LineageEntityKindV1.RUN, id=output.run_id) elif output.job_id is not None: - from_ = LineageEntityV1(kind=LineageEntityKindV1.JOB, id=output.job_id) + from_ = LineageEntityV1(kind=LineageEntityKindV1.JOB, id=str(output.job_id)) + relation = LineageOutputRelationV1( type=output.type, from_=from_, - to=LineageEntityV1(kind=LineageEntityKindV1.DATASET, id=output.dataset_id), + to=LineageEntityV1(kind=LineageEntityKindV1.DATASET, id=str(output.dataset_id)), last_interaction_at=output.created_at, num_bytes=output.num_bytes, num_rows=output.num_rows, num_files=output.num_files, - o_schema=output.schema, + o_schema=LineageIORelationSchemaV1.model_validate(output.schema) if output.schema else None, ) relations.append(relation) + return sorted(relations, key=lambda x: (x.from_.id, x.to.id, x.type)) diff --git a/data_rentgen/utils/__init__.py b/data_rentgen/utils/__init__.py index ab8c169c..eadf84f3 100644 --- a/data_rentgen/utils/__init__.py +++ b/data_rentgen/utils/__init__.py @@ -1,5 +1,5 @@ # SPDX-FileCopyrightText: 2024-2025 MTS PJSC # SPDX-License-Identifier: Apache-2.0 -from data_rentgen.utils.uuid import UUID +from data_rentgen.utils.uuid import UUIDv6Plus -__all__ = ["UUID"] +__all__ = ["UUIDv6Plus"] diff --git a/data_rentgen/utils/uuid.py b/data_rentgen/utils/uuid.py index 4af88350..358bbdcc 100644 --- a/data_rentgen/utils/uuid.py +++ b/data_rentgen/utils/uuid.py @@ -1,7 +1,7 @@ # SPDX-FileCopyrightText: 2024-2025 MTS PJSC # 
SPDX-License-Identifier: Apache-2.0 from typing import Any -from uuid import UUID as OLD_UUID +from uuid import UUID as PlainUUID from pydantic import PlainValidator from typing_extensions import Annotated @@ -19,7 +19,7 @@ def uuid_version_validator(run_id: Any) -> UUIDv7: # Teach Pydantic how to parse and represent UUID v7 # Right now use uuid from uuid lib cause: https://github.com/tiangolo/fastapi/issues/10259 -UUID = Annotated[ - OLD_UUID, +UUIDv6Plus = Annotated[ + PlainUUID, PlainValidator(uuid_version_validator), ] diff --git a/docs/changelog/next_release/164.breaking.rst b/docs/changelog/next_release/164.breaking.rst new file mode 100644 index 00000000..297a831a --- /dev/null +++ b/docs/changelog/next_release/164.breaking.rst @@ -0,0 +1,113 @@ +Change response schema of ``GET /:entity/lineage`` from: + +.. code:: python + + { + "relations": [ + { + "kind": "PARENT", + "from": {"kind": "JOB", "id": 123}, + "to": {"kind": "RUN", "id": "00000000-0000-0000-0000-000000000000"}, + }, + { + "kind": "SYMLINK", + "from": {"kind": "DATASET", "id": 234}, + "to": {"kind": "DATASET", "id": 999}, + }, + { + "kind": "INPUT", + "from": {"kind": "DATASET", "id": 234}, + "to": {"kind": "OPERATION", "id": "11111111-1111-1111-1111-111111111111"}, + }, + { + "kind": "OUTPUT", + "from": {"kind": "OPERATION", "id": "11111111-1111-1111-1111-111111111111"}, + "to": {"kind": "DATASET", "id": 234}, + }, + ], + "nodes": [ + {"kind": "DATASET", "id": 123}, + {"kind": "JOB", "id": 234}, + {"kind": "RUN", "id": "00000000-0000-0000-0000-000000000000"}, + {"kind": "OPERATION", "id": "11111111-1111-1111-1111-111111111111"}, + ], + } + +to: + +.. 
code:: python + + { + "relations": { + "parents": [ + { + "from": {"kind": "JOB", "id": "123"}, + "to": {"kind": "RUN", "id": "00000000-0000-0000-0000-000000000000"}, + }, + ], + "symlinks": [ + { + "from": {"kind": "DATASET", "id": "234"}, + "to": {"kind": "DATASET", "id": "999"}, + }, + ], + "inputs": [ + { + "from": {"kind": "DATASET", "id": "234"}, + "to": { + "kind": "OPERATION", + "id": "11111111-1111-1111-1111-111111111111", + }, + }, + ], + "outputs": [ + { + "from": { + "kind": "OPERATION", + "id": "11111111-1111-1111-1111-111111111111", + }, + "to": {"kind": "DATASET", "id": "234"}, + }, + ], + }, + "nodes": { + "datasets": { + "123": {"id": "123"}, + }, + "jobs": { + "234": {"id": "234"}, + }, + "runs": { + "00000000-0000-0000-0000-000000000000": { + "id": "00000000-0000-0000-0000-000000000000" + }, + }, + "operations": { + "11111111-1111-1111-1111-111111111111": { + "id": "11111111-1111-1111-1111-111111111111" + }, + }, + }, + } + +Note that dataset, job and location ids in all responses were converted from ints to strings, because in JSON object keys have to be strings. + +This allows to replace filters on UI side with O(n) complexity like: + +.. code:: javascript + + // O(n) + relations.filter((relation) => relation.kind == "INPUT" && relation.from.kind == "DATASET" && relation.from.id == dataset_id) + // again O(n) + nodes.filter((node) => node.kind == "DATASET" && node.id == dataset_id) + +with much more effective ones: + +.. code:: javascript + + // O(n) with much smaller n + relations.inputs.filter((relation) => relation.from.kind == "DATASET" && relation.from.id == dataset_id) + // O(1) + nodes.datasets[dataset_id] + +The size of output JSON is not much different. diff --git a/poetry.lock b/poetry.lock index c718865d..85343ce6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. 
+# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. [[package]] name = "aiofiles" @@ -6,6 +6,8 @@ version = "24.1.0" description = "File support for asyncio." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, @@ -17,6 +19,8 @@ version = "0.12.0" description = "Kafka integration with asyncio" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"consumer\"" files = [ {file = "aiokafka-0.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da8938eac2153ca767ac0144283b3df7e74bb4c0abc0c9a722f3ae63cfbf3a42"}, {file = "aiokafka-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5c827c8883cfe64bc49100de82862225714e1853432df69aba99f135969bb1b"}, @@ -69,6 +73,7 @@ version = "1.0.0" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.10" +groups = ["docs"] files = [ {file = "alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b"}, {file = "alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"}, @@ -80,6 +85,8 @@ version = "1.14.1" description = "A database migration tool for SQLAlchemy." 
optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\" or extra == \"consumer\"" files = [ {file = "alembic-1.14.1-py3-none-any.whl", hash = "sha256:1acdd7a3a478e208b0503cd73614d5e4c6efafa4e73518bb60e4f2846a37b1c5"}, {file = "alembic-1.14.1.tar.gz", hash = "sha256:496e888245a53adf1498fcab31713a469c65836f8de76e01399aa1c3e90dd213"}, @@ -99,6 +106,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main", "docs"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -110,6 +118,7 @@ version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main", "test"] files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -132,6 +141,8 @@ version = "4.3.4" description = "Middleware correlating project logs to individual requests" optional = true python-versions = "<4.0,>=3.8" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "asgi_correlation_id-4.3.4-py3-none-any.whl", hash = "sha256:36ce69b06c7d96b4acb89c7556a4c4f01a972463d3d49c675026cbbd08e9a0a2"}, {file = "asgi_correlation_id-4.3.4.tar.gz", hash = "sha256:ea6bc310380373cb9f731dc2e8b2b6fb978a76afe33f7a2384f697b8d6cd811d"}, @@ -150,6 +161,7 @@ version = "3.0.0" description = "Annotate AST trees with source code positions" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "asttokens-3.0.0-py3-none-any.whl", hash = 
"sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, @@ -165,6 +177,8 @@ version = "0.2.2" description = "Python decorator for async properties." optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "async_property-0.2.2-py2.py3-none-any.whl", hash = "sha256:8924d792b5843994537f8ed411165700b27b2bd966cefc4daeefc1253442a9d7"}, {file = "async_property-0.2.2.tar.gz", hash = "sha256:17d9bd6ca67e27915a75d92549df64b5c7174e9dc806b30a3934dc4ff0506380"}, @@ -176,6 +190,8 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"postgres\" and python_version < \"3.11.0\" or extra == \"consumer\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -187,6 +203,8 @@ version = "0.30.0" description = "An asyncio PostgreSQL driver" optional = true python-versions = ">=3.8.0" +groups = ["main"] +markers = "extra == \"postgres\"" files = [ {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, @@ -253,6 +271,7 @@ version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = 
"sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, @@ -272,6 +291,7 @@ version = "2.2.0" description = "Seamlessly integrate pydantic models in your Sphinx documentation." optional = false python-versions = "<4.0.0,>=3.8.1" +groups = ["docs"] files = [ {file = "autodoc_pydantic-2.2.0-py3-none-any.whl", hash = "sha256:8c6a36fbf6ed2700ea9c6d21ea76ad541b621fbdf16b5a80ee04673548af4d95"}, ] @@ -295,6 +315,7 @@ version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, @@ -309,6 +330,7 @@ version = "1.8.2" description = "Security oriented static analyser for python code." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "bandit-1.8.2-py3-none-any.whl", hash = "sha256:df6146ad73dd30e8cbda4e29689ddda48364e36ff655dbfc86998401fcf1721f"}, {file = "bandit-1.8.2.tar.gz", hash = "sha256:e00ad5a6bc676c0954669fe13818024d66b70e42cf5adb971480cf3b671e835f"}, @@ -333,6 +355,7 @@ version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" +groups = ["docs"] files = [ {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, @@ -354,6 +377,7 @@ version = "25.1.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, @@ -400,6 +424,7 @@ version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "docs", "test"] files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -411,6 +436,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -480,6 +506,7 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] +markers = {main = "extra == \"server\" and platform_python_implementation != \"PyPy\"", dev = "platform_python_implementation != \"PyPy\"", test = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} [package.dependencies] pycparser = "*" @@ -490,6 +517,7 @@ version = "3.4.0" description = "Validate configuration and produce human readable error messages." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -501,6 +529,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" +groups = ["main", "docs"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -595,6 +624,7 @@ files = [ {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] +markers = {main = "extra == \"server\""} [[package]] name = "click" @@ -602,10 +632,12 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "docs"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] +markers = {main = "extra == \"server\" or extra == \"consumer\""} [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -616,10 +648,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "docs", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "extra == \"server\" and platform_system == \"Windows\" or extra == \"consumer\" and platform_system == \"Windows\"", docs = "platform_system == \"Windows\" or sys_platform == \"win32\"", test = "sys_platform == \"win32\""} [[package]] name = "coloredlogs" @@ -627,6 +661,8 @@ version = "15.0.1" description = "Colored terminal output for Python's logging module" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +markers = "extra == \"server\" or extra == \"consumer\"" files = [ {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, @@ -644,6 +680,7 @@ version = "7.6.11" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "coverage-7.6.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eafea49da254a8289bed3fab960f808b322eda5577cb17a3733014928bbfbebd"}, {file = "coverage-7.6.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a3f7cbbcb4ad95067a6525f83a6fc78d9cbc1e70f8abaeeaeaa72ef34f48fc3"}, @@ -713,6 +750,8 @@ version = "2.9.1" description = "Thin Python bindings to de/compression algorithms in Rust" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"consumer\"" files = [ {file = "cramjam-2.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:8e82464d1e00fbbb12958999b8471ba5e9f3d9711954505a0a7b378762332e6f"}, {file = "cramjam-2.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d2df8a6511cc08ef1fccd2e0c65e2ebc9f57574ec8376052a76851af5398810"}, @@ -815,6 +854,7 @@ version = "44.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main", "dev"] files = [ {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, @@ -848,6 +888,7 @@ files = [ {file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"}, {file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"}, ] +markers = {main = "extra == \"server\""} [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} @@ -868,6 +909,7 @@ version = "8.2.0" description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "deepdiff-8.2.0-py3-none-any.whl", hash = "sha256:5091f2cdfd372b1b9f6bfd8065ba323ae31118dc4e42594371b38c8bea3fd0a4"}, {file = "deepdiff-8.2.0.tar.gz", hash = "sha256:6ec78f65031485735545ffbe7a61e716c3c2d12ca6416886d5e9291fc76c46c3"}, @@ -886,6 +928,8 @@ version = "2.1.0" description = "A library to handle automated deprecations" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, @@ -900,6 +944,7 @@ version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, @@ -911,6 +956,7 @@ version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -922,6 +968,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "test"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = 
"sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -936,6 +984,7 @@ version = "2.2.0" description = "Get the currently executing AST node of a frame, and other information" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, @@ -950,6 +999,8 @@ version = "2.4.12" description = "FastDepends - extracted and cleared from HTTP domain logic FastAPI Dependency Injection System. Async and sync are both supported." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"consumer\"" files = [ {file = "fast_depends-2.4.12-py3-none-any.whl", hash = "sha256:9e5d110ddc962329e46c9b35e5fe65655984247a13ee3ca5a33186db7d2d75c2"}, {file = "fast_depends-2.4.12.tar.gz", hash = "sha256:9393e6de827f7afa0141e54fa9553b737396aaf06bd0040e159d1f790487b16d"}, @@ -965,6 +1016,8 @@ version = "0.115.8" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"}, {file = "fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"}, @@ -985,6 +1038,8 @@ version = "0.5.34" description = "FastStream: the simplest way to work with a messaging queues" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"consumer\"" files = [ {file = "faststream-0.5.34-py3-none-any.whl", hash = "sha256:aa7f61d6968a68f13ebf755cce9e8bf11b00717c28b2ef66e896b5d652a6c6a2"}, {file = "faststream-0.5.34.tar.gz", hash = 
"sha256:84615968c5768ebaa89b72ae66b53e5302c08e7d18b341ef5193e54cb6ba8623"}, @@ -1021,6 +1076,7 @@ version = "3.17.0" description = "A platform independent file lock." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, @@ -1037,6 +1093,7 @@ version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" +groups = ["dev"] files = [ {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, @@ -1053,6 +1110,7 @@ version = "1.2.3" description = "Flake8 plug-in loading the configuration from pyproject.toml" optional = false python-versions = ">= 3.6" +groups = ["dev"] files = [ {file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, ] @@ -1070,6 +1128,7 @@ version = "2024.8.6" description = "A clean customisable Sphinx documentation theme." 
optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"}, {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"}, @@ -1087,6 +1146,7 @@ version = "24.11.1" description = "Coroutine-based network library" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "gevent-24.11.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:92fe5dfee4e671c74ffaa431fd7ffd0ebb4b339363d24d0d944de532409b935e"}, {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7bfcfe08d038e1fa6de458891bca65c1ada6d145474274285822896a858c870"}, @@ -1147,6 +1207,7 @@ version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "test"] files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -1222,6 +1283,7 @@ files = [ {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] +markers = {main = "extra == \"server\" or extra == \"consumer\"", dev = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"server\" or extra == \"consumer\"", test = "extra == \"server\" 
or extra == \"consumer\" or platform_python_implementation == \"CPython\""} [package.extras] docs = ["Sphinx", "furo"] @@ -1233,6 +1295,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main", "test"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -1244,6 +1307,7 @@ version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -1265,6 +1329,7 @@ version = "0.27.2" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, @@ -1290,6 +1355,8 @@ version = "10.0" description = "Human friendly output for text interfaces using Python" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +markers = "extra == \"server\" or extra == \"consumer\"" files = [ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, @@ -1304,6 +1371,7 @@ version = "2.1.4" description = "Never use print() to debug again; inspect variables, expressions, and program execution with a single, simple function call." 
optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "icecream-2.1.4-py3-none-any.whl", hash = "sha256:7bb715f69102cae871b3a361c3b656536db02cfcadac9664c673581cac4df4fd"}, {file = "icecream-2.1.4.tar.gz", hash = "sha256:58755e58397d5350a76f25976dee7b607f5febb3c6e1cddfe6b1951896e91573"}, @@ -1321,6 +1389,7 @@ version = "2.6.6" description = "File identification library for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "identify-2.6.6-py2.py3-none-any.whl", hash = "sha256:cbd1810bce79f8b671ecb20f53ee0ae8e86ae84b557de31d89709dc2a48ba881"}, {file = "identify-2.6.6.tar.gz", hash = "sha256:7bec12768ed44ea4761efb47806f0a41f86e7c0a5fdf5950d4648c90eca7e251"}, @@ -1335,6 +1404,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "docs", "test"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -1349,6 +1419,7 @@ version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["docs"] files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -1360,6 +1431,7 @@ version = "24.7.2" description = "A small library that versions your Python projects." 
optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "incremental-24.7.2-py3-none-any.whl", hash = "sha256:8cb2c3431530bec48ad70513931a760f446ad6c25e8333ca5d95e24b0ed7b8fe"}, {file = "incremental-24.7.2.tar.gz", hash = "sha256:fb4f1d47ee60efe87d4f6f0ebb5f70b9760db2b2574c59c8e8912be4ebd464c9"}, @@ -1378,6 +1450,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1389,6 +1462,7 @@ version = "6.0.0" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "isort-6.0.0-py3-none-any.whl", hash = "sha256:567954102bb47bb12e0fae62606570faacddd441e45683968c8d1734fb1af892"}, {file = "isort-6.0.0.tar.gz", hash = "sha256:75d9d8a1438a9432a7d7b54f2d3b45cad9a4a0fdba43617d9873379704a8bdf1"}, @@ -1404,6 +1478,8 @@ version = "2.2.0" description = "Safely pass data to untrusted environments and back." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, @@ -1415,6 +1491,7 @@ version = "3.1.5" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -1432,6 +1509,8 @@ version = "1.5.6" description = "Implementation of JOSE Web standards" optional = true python-versions = ">= 3.8" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"}, {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"}, @@ -1447,6 +1526,8 @@ version = "1.3.8" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\" or extra == \"consumer\"" files = [ {file = "Mako-1.3.8-py3-none-any.whl", hash = "sha256:42f48953c7eb91332040ff567eb7eea69b22e7a4affbc5ba8e845e8f730f6627"}, {file = "mako-1.3.8.tar.gz", hash = "sha256:577b97e414580d3e088d47c2dbbe9594aa7a5146ed2875d4dfa9075af2dd3cc8"}, @@ -1466,10 +1547,12 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, ] +markers = {main = "extra == \"consumer\""} [package.dependencies] mdurl = ">=0.1,<1.0" @@ -1490,6 +1573,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" +groups = ["main", "docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1553,6 +1637,7 @@ files = [ {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] +markers = {main = "extra == \"server\" or extra == \"consumer\""} [[package]] name = "mccabe" @@ -1560,6 +1645,7 @@ version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -1571,10 +1657,12 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +markers = {main = "extra == \"consumer\""} [[package]] name = "mypy" @@ -1582,6 +1670,7 @@ version = "1.15.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, @@ -1635,6 +1724,7 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -1646,6 +1736,7 @@ version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, @@ -1657,6 +1748,7 @@ version = "1.8.0" description = "Sphinx extension to support docstrings in Numpy format" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "numpydoc-1.8.0-py3-none-any.whl", hash = "sha256:72024c7fd5e17375dec3608a27c03303e8ad00c81292667955c6fea7a3ccf541"}, {file = "numpydoc-1.8.0.tar.gz", hash = "sha256:022390ab7464a44f8737f79f8b31ce1d3cfa4b4af79ccaa1aac5e8368db587fb"}, @@ -1678,6 +1770,7 @@ version = "5.3.0" description = "Orderly set" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "orderly_set-5.3.0-py3-none-any.whl", hash = "sha256:c2c0bfe604f5d3d9b24e8262a06feb612594f37aa3845650548befd7772945d1"}, {file = "orderly_set-5.3.0.tar.gz", hash = "sha256:80b3d8fdd3d39004d9aad389eaa0eab02c71f0a0511ba3a6d54a935a6c6a0acc"}, @@ -1689,10 +1782,12 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "docs", "test"] files = [ {file 
= "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] +markers = {main = "extra == \"server\" or extra == \"consumer\""} [[package]] name = "pathspec" @@ -1700,6 +1795,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -1711,6 +1807,7 @@ version = "6.1.0" description = "Python Build Reasonableness" optional = false python-versions = ">=2.6" +groups = ["dev"] files = [ {file = "pbr-6.1.0-py2.py3-none-any.whl", hash = "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a"}, {file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"}, @@ -1722,6 +1819,7 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -1738,6 +1836,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -1753,6 +1852,7 @@ version = "4.1.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pre_commit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d29e7cb346295bcc1cc75fc3e92e343495e3ea0196c9ec6ba53f49f10ab6ae7b"}, {file = "pre_commit-4.1.0.tar.gz", hash = "sha256:ae3f018575a588e30dfddfab9a05448bfbd6b73d78709617b5a2b853549716d4"}, @@ -1771,6 +1871,8 @@ version = "0.21.1" description = "Python client for the Prometheus monitoring system." 
optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, @@ -1785,6 +1887,7 @@ version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, @@ -1833,7 +1936,6 @@ files = [ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, @@ -1862,6 +1964,7 @@ version = "2.12.1" 
description = "Python style guide checker" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, @@ -1873,129 +1976,142 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] +markers = {main = "extra == \"server\" and platform_python_implementation != \"PyPy\"", dev = "platform_python_implementation != \"PyPy\"", test = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} [[package]] name = "pydantic" -version = "2.8.2" +version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main", "docs"] files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" -typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, -] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" 
+typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = 
"pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = 
"sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = 
"sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = 
"sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - 
{file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = 
"sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, +groups = ["main", "docs"] +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = 
"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", 
hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = 
"sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] @@ -2007,10 +2123,12 @@ version = "2.7.1" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" +groups = ["main", "docs"] files = [ {file = "pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd"}, {file = "pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93"}, ] +markers = {main = "extra == \"server\" or extra == \"consumer\""} [package.dependencies] pydantic = ">=2.7.0" @@ -2027,6 +2145,7 @@ version = "3.2.0" description = "passive 
checker of Python programs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, @@ -2038,10 +2157,12 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["main", "dev", "docs"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] +markers = {main = "extra == \"consumer\""} [package.extras] windows-terminal = ["colorama (>=0.4.6)"] @@ -2052,6 +2173,8 @@ version = "2.10.1" description = "JSON Web Token implementation in Python" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -2069,6 +2192,8 @@ version = "3.5.4" description = "A python implementation of GNU readline." 
optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\" and sys_platform == \"win32\" or extra == \"consumer\" and sys_platform == \"win32\"" files = [ {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, @@ -2083,6 +2208,7 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -2105,6 +2231,7 @@ version = "0.21.2" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"}, {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"}, @@ -2123,6 +2250,7 @@ version = "2.2.1" description = "A simple plugin to list unused fixtures in pytest" optional = false python-versions = "*" +groups = ["test"] files = [ {file = "pytest-deadfixtures-2.2.1.tar.gz", hash = "sha256:ca15938a4e8330993ccec9c6c847383d88b3cd574729530647dc6b492daa9c1e"}, {file = "pytest_deadfixtures-2.2.1-py2.py3-none-any.whl", hash = "sha256:db71533f2d9456227084e00a1231e732973e299ccb7c37ab92e95032ab6c083e"}, @@ -2137,6 +2265,7 @@ version = "3.16.0" description = "Pytest plugin to randomly order tests and control random.seed." 
optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "pytest_randomly-3.16.0-py3-none-any.whl", hash = "sha256:8633d332635a1a0983d3bba19342196807f6afb17c3eef78e02c2f85dade45d6"}, {file = "pytest_randomly-3.16.0.tar.gz", hash = "sha256:11bf4d23a26484de7860d82f726c0629837cf4064b79157bd18ec9d41d7feb26"}, @@ -2151,6 +2280,7 @@ version = "15.0" description = "pytest plugin to re-run tests to eliminate flaky failures" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "pytest-rerunfailures-15.0.tar.gz", hash = "sha256:2d9ac7baf59f4c13ac730b47f6fa80e755d1ba0581da45ce30b72fb3542b4474"}, {file = "pytest_rerunfailures-15.0-py3-none-any.whl", hash = "sha256:dd150c4795c229ef44320adc9a0c0532c51b78bb7a6843a8c53556b9a611df1a"}, @@ -2166,6 +2296,8 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "extra == \"server\" or extra == \"consumer\"" files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2180,10 +2312,12 @@ version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" +groups = ["main", "docs"] files = [ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, ] +markers = {main = "extra == \"server\" or extra == \"consumer\""} [package.extras] cli = ["click (>=5.0)"] @@ -2194,6 +2328,8 @@ version = "3.2.1" description = "JSON Log Formatter for the Python Logging Package" 
optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\" or extra == \"consumer\"" files = [ {file = "python_json_logger-3.2.1-py3-none-any.whl", hash = "sha256:cdc17047eb5374bd311e748b42f99d71223f3b0e186f4206cc5d52aefe85b090"}, {file = "python_json_logger-3.2.1.tar.gz", hash = "sha256:8eb0554ea17cb75b05d2848bc14fb02fbdbd9d6972120781b974380bfa162008"}, @@ -2208,6 +2344,8 @@ version = "5.3.1" description = "python-keycloak is a Python package providing access to the Keycloak API." optional = true python-versions = "<4.0,>=3.9" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "python_keycloak-5.3.1-py3-none-any.whl", hash = "sha256:d3167d788add29d6cb3fda0629ee0f453fac9f6ee35241938488dbf9ac0635bb"}, {file = "python_keycloak-5.3.1.tar.gz", hash = "sha256:e4bbdbdddc480d64c6211095d633bea025028eb895aa83ccdc44f90ab70eff28"}, @@ -2228,6 +2366,8 @@ version = "0.0.20" description = "A streaming multipart parser for Python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, {file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, @@ -2239,6 +2379,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2294,6 +2435,7 @@ files = [ {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, {file = "pyyaml-6.0.2.tar.gz", hash 
= "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +markers = {main = "extra == \"server\" or extra == \"consumer\""} [[package]] name = "requests" @@ -2301,10 +2443,12 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["main", "docs"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] +markers = {main = "extra == \"server\""} [package.dependencies] certifi = ">=2017.4.17" @@ -2322,6 +2466,8 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -2336,6 +2482,7 @@ version = "0.22.0" description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0"}, {file = "respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91"}, @@ -2350,10 +2497,12 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["main", "dev"] files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] +markers = {main = "extra == \"consumer\""} [package.dependencies] markdown-it-py = ">=2.2.0" @@ -2369,6 +2518,7 @@ version = "75.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["docs", "test"] files = [ {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, @@ -2389,6 +2539,8 @@ version = "1.5.4" description = "Tool to Detect Surrounding Shell" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"consumer\"" files = [ {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, @@ -2400,6 +2552,8 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "extra == \"server\" 
or extra == \"consumer\"" files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -2411,6 +2565,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main", "test"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -2422,6 +2577,7 @@ version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." optional = false python-versions = "*" +groups = ["docs"] files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -2433,6 +2589,7 @@ version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, @@ -2444,6 +2601,7 @@ version = "8.1.3" description = "Python documentation generator" optional = false python-versions = ">=3.10" +groups = ["docs"] files = [ {file = "sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2"}, {file = "sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927"}, @@ -2479,6 +2637,7 @@ version = "0.5.2" description = "A sphinx extension that automatically documents argparse commands and options" optional = false python-versions = ">=3.10" +groups = ["docs"] files = [ {file = "sphinx_argparse-0.5.2-py3-none-any.whl", hash = "sha256:d771b906c36d26dee669dbdbb5605c558d9440247a5608b810f7fa6e26ab1fd3"}, {file = "sphinx_argparse-0.5.2.tar.gz", hash = "sha256:e5352f8fa894b6fb6fda0498ba28a9f8d435971ef4bbc1a6c9c6414e7644f032"}, @@ -2500,6 +2659,7 @@ version = "1.0.0b2" description = "A modern skeleton for Sphinx themes." optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, @@ -2517,6 +2677,7 @@ version = "0.5.2" description = "Add a copy button to each of your code cells." 
optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, @@ -2535,6 +2696,7 @@ version = "0.6.1" description = "A sphinx extension for designing beautiful, view size responsive web components." optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinx_design-0.6.1-py3-none-any.whl", hash = "sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c"}, {file = "sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632"}, @@ -2560,6 +2722,7 @@ version = "1.0.1" description = "Sphinx Extension adding support for custom favicons" optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "sphinx-favicon-1.0.1.tar.gz", hash = "sha256:df796de32125609c1b4a8964db74270ebf4502089c27cd53f542354dc0b57e8e"}, {file = "sphinx_favicon-1.0.1-py3-none-any.whl", hash = "sha256:7c93d6b634cb4c9687ceab67a8526f05d3b02679df94e273e51a43282e6b034c"}, @@ -2579,6 +2742,7 @@ version = "5.0.0" description = "A Sphinx extension for linking to your project's issue tracker" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinx_issues-5.0.0-py3-none-any.whl", hash = "sha256:d80704a01c8af3d76586771a67a9e48f2d1a6091a0377458c49908460a6a31ea"}, {file = "sphinx_issues-5.0.0.tar.gz", hash = "sha256:192e43cf071ed7aead401cd14fd15076ecb0866238c095d672180618740c6bae"}, @@ -2597,6 +2761,7 @@ version = "0.3.8" description = "Get the \"last updated\" time for each Sphinx page from Git" optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "sphinx_last_updated_by_git-0.3.8-py3-none-any.whl", hash = 
"sha256:6382c8285ac1f222483a58569b78c0371af5e55f7fbf9c01e5e8a72d6fdfa499"}, {file = "sphinx_last_updated_by_git-0.3.8.tar.gz", hash = "sha256:c145011f4609d841805b69a9300099fc02fed8f5bb9e5bcef77d97aea97b7761"}, @@ -2611,6 +2776,7 @@ version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -2627,6 +2793,7 @@ version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -2643,6 +2810,7 @@ version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -2659,6 +2827,7 @@ version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" +groups = ["docs"] files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = 
"sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -2673,6 +2842,7 @@ version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -2689,6 +2859,7 @@ version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -2705,6 +2876,7 @@ version = "0.4.0a0" description = "An RST directive for injecting a Towncrier-generated changelog draft containing fragments for the unreleased (next) project version" optional = false python-versions = ">=3.6" +groups = ["docs"] files = [ {file = "sphinxcontrib-towncrier-0.4.0a0.tar.gz", hash = "sha256:d9b1513fc07781432dd3a0b2ca797cfe0e99e9b5bc5e5c8bf112d5d142afb6dc"}, {file = "sphinxcontrib_towncrier-0.4.0a0-py3-none-any.whl", hash = "sha256:ec734e3d0920e2ce26e99681119f398a9e1fc0aa6c2d7ed1f052f1219dcd4653"}, @@ -2720,6 +2892,7 @@ version = "2.0.38" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6"}, {file = 
"SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444"}, @@ -2779,6 +2952,7 @@ files = [ {file = "SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753"}, {file = "sqlalchemy-2.0.38.tar.gz", hash = "sha256:e5a4d82bdb4bf1ac1285a68eab02d253ab73355d9f0fe725a97e1e0fa689decb"}, ] +markers = {main = "extra == \"server\" or extra == \"consumer\""} [package.dependencies] greenlet = {version = "!=0.4.17", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} @@ -2816,6 +2990,8 @@ version = "0.41.2" description = "Various utility functions for SQLAlchemy." optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"server\" or extra == \"consumer\"" files = [ {file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"}, {file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"}, @@ -2844,6 +3020,8 @@ version = "0.45.3" description = "The little ASGI library that shines." optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d"}, {file = "starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f"}, @@ -2861,6 +3039,8 @@ version = "0.23.0" description = "Prometheus metrics exporter for Starlette applications." 
optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "starlette_exporter-0.23.0-py3-none-any.whl", hash = "sha256:ea1a27f2aae48122931e2384a361a03e00261efbb4a665ce1ae2e46f29123d5e"}, {file = "starlette_exporter-0.23.0.tar.gz", hash = "sha256:f80998db2d4a3462808a9bce56950046b113d3fab6ec6c20cb6de4431d974969"}, @@ -2876,6 +3056,7 @@ version = "5.4.0" description = "Manage dynamic plugins for Python applications" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "stevedore-5.4.0-py3-none-any.whl", hash = "sha256:b0be3c4748b3ea7b854b265dcb4caa891015e442416422be16f8b31756107857"}, {file = "stevedore-5.4.0.tar.gz", hash = "sha256:79e92235ecb828fe952b6b8b0c6c87863248631922c8e8e0fa5b17b232c4514d"}, @@ -2890,6 +3071,7 @@ version = "0.9.0" description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -2904,6 +3086,8 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["dev", "docs", "test"] +markers = "python_version < \"3.11\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -2945,6 +3129,7 @@ version = "23.11.0" description = "Building newsfiles for your project." 
optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "towncrier-23.11.0-py3-none-any.whl", hash = "sha256:2e519ca619426d189e3c98c99558fe8be50c9ced13ea1fc20a4a353a95d2ded7"}, {file = "towncrier-23.11.0.tar.gz", hash = "sha256:13937c247e3f8ae20ac44d895cf5f96a60ad46cfdcc1671759530d7837d9ee5d"}, @@ -2965,6 +3150,8 @@ version = "0.15.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"consumer\"" files = [ {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, @@ -2982,6 +3169,7 @@ version = "1.5.0.20241221" description = "Typing stubs for jwcrypto" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_jwcrypto-1.5.0.20241221-py3-none-any.whl", hash = "sha256:0a10db32d169e15c289cb82822f763f498f42d11bbe6be36423496452d9f47a0"}, {file = "types_jwcrypto-1.5.0.20241221.tar.gz", hash = "sha256:9e485c965fb993b47ce35f2b748b08497563a92490459f6e253389ebf2c94760"}, @@ -2996,6 +3184,7 @@ version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, @@ -3007,6 +3196,7 @@ version = "6.0.12.20241230" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, {file = 
"types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, @@ -3018,6 +3208,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "docs", "test"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -3029,10 +3220,12 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" +groups = ["main", "docs"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] +markers = {main = "extra == \"server\""} [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] @@ -3046,6 +3239,8 @@ version = "2024.7.10" description = "New time-based UUID formats which are suited for use as a database key" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"server\" or extra == \"consumer\"" files = [ {file = "uuid6-2024.7.10-py3-none-any.whl", hash = "sha256:93432c00ba403751f722829ad21759ff9db051dea140bf81493271e8e4dd18b7"}, {file = "uuid6-2024.7.10.tar.gz", hash = "sha256:2d29d7f63f593caaeea0e0d0dd0ad8129c9c663b29e19bdf882e864bedf18fb0"}, @@ -3057,6 +3252,8 @@ version = "0.34.0" description = "The lightning-fast ASGI server." 
optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"server\"" files = [ {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, @@ -3076,6 +3273,7 @@ version = "20.29.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "virtualenv-20.29.1-py3-none-any.whl", hash = "sha256:4e4cb403c0b0da39e13b46b1b2476e505cb0046b25f242bee80f62bf990b2779"}, {file = "virtualenv-20.29.1.tar.gz", hash = "sha256:b8b8970138d32fb606192cb97f6cd4bb644fa486be9308fb9b63f81091b5dc35"}, @@ -3096,6 +3294,8 @@ version = "1.0.4" description = "Simple, modern and high performance file watching and code reload in python." optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"consumer\"" files = [ {file = "watchfiles-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08"}, {file = "watchfiles-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1"}, @@ -3179,6 +3379,7 @@ version = "1.0.0" description = "The strictest and most opinionated python linter ever" optional = false python-versions = "<4.0,>=3.10" +groups = ["dev"] files = [ {file = "wemake_python_styleguide-1.0.0-py3-none-any.whl", hash = "sha256:c0cc3fe2a2aa8d7ca76f02bc27bfe344c76ed9652f94859c241b96f0fcefa9ac"}, {file = "wemake_python_styleguide-1.0.0.tar.gz", hash = "sha256:00e96dc73faf5471b21d4baa5041f457c19739cf14677832429be69e5f4f7964"}, @@ -3195,6 +3396,7 @@ version = "5.0" description = "Very basic event publishing system" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "zope.event-5.0-py3-none-any.whl", hash = 
"sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, @@ -3213,6 +3415,7 @@ version = "7.2" description = "Interfaces for Python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "zope.interface-7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2"}, {file = "zope.interface-7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a"}, @@ -3267,6 +3470,6 @@ postgres = ["asyncpg"] server = ["alembic", "asgi-correlation-id", "coloredlogs", "fastapi", "greenlet", "itsdangerous", "packaging", "pydantic-settings", "pyjwt", "python-dateutil", "python-json-logger", "python-keycloak", "python-multipart", "pyyaml", "sqlalchemy", "sqlalchemy-utils", "starlette", "starlette-exporter", "uuid6", "uvicorn"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.10" -content-hash = "ec804bbe666d79e114ab96c1e66c80ed5d8aeb3c96f831b65f567b543018659d" +content-hash = "7609a9edab9b951bce56cab0690818f60a057ab410cbe27869fbfb50f5ec635b" diff --git a/pyproject.toml b/pyproject.toml index ccf48da8..37f0761a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,8 +45,7 @@ include = [ [tool.poetry.dependencies] python = "^3.10" -# https://github.com/pydantic/pydantic/issues/10964 -pydantic = "~2.8.2" +pydantic = "^2.10.6" typing-extensions = "^4.12.2" pydantic-settings = {version = "^2.7.1", optional = true} alembic = {version = "^1.14.0", optional = true} diff --git a/tests/test_server/test_lineage/test_dataset_lineage.py b/tests/test_server/test_lineage/test_dataset_lineage.py index 247e9cbe..3d6a4895 100644 --- a/tests/test_server/test_lineage/test_dataset_lineage.py +++ b/tests/test_server/test_lineage/test_dataset_lineage.py @@ -57,8 +57,18 @@ async def 
test_get_dataset_lineage_no_relations( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": [], - "nodes": datasets_to_json([dataset]), + "relations": { + "parents": [], + "symlinks": [], + "inputs": [], + "outputs": [], + }, + "nodes": { + "datasets": datasets_to_json([dataset]), + "jobs": {}, + "runs": {}, + "operations": {}, + }, } @@ -102,12 +112,18 @@ async def test_get_dataset_lineage_with_granularity_run( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json([dataset]) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json([dataset]), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -147,11 +163,18 @@ async def test_get_dataset_lineage_with_granularity_job( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB") - + outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json([dataset])), + "relations": { + "parents": [], + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB"), + "outputs": outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json([dataset]), + "jobs": jobs_to_json(jobs), + "runs": {}, + "operations": {}, + }, } @@ -200,15 +223,18 @@ async def test_get_dataset_lineage_with_granularity_operation( 
assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + operation_parents_to_json(operations) - + inputs_to_json(inputs, granularity="OPERATION") - + outputs_to_json(outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json(jobs) + datasets_to_json([dataset]) + runs_to_json(runs) + operations_to_json(operations) - ), + "relations": { + "parents": run_parents_to_json(runs) + operation_parents_to_json(operations), + "symlinks": [], + "inputs": inputs_to_json(inputs, granularity="OPERATION"), + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json([dataset]), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": operations_to_json(operations), + }, } @@ -249,8 +275,18 @@ async def test_get_dataset_lineage_with_direction_downstream( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": (run_parents_to_json(runs) + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN")), - "nodes": (jobs_to_json(jobs) + datasets_to_json([dataset]) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": [], + }, + "nodes": { + "datasets": datasets_to_json([dataset]), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -292,8 +328,18 @@ async def test_get_dataset_lineage_with_direction_upstream( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": (run_parents_to_json(runs) + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN")), - "nodes": (jobs_to_json(jobs) + datasets_to_json([dataset]) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": [], + "outputs": 
outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json([dataset]), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -343,12 +389,18 @@ async def test_get_dataset_lineage_with_until( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(inputs_with_until), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs_with_until), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json([dataset]) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json([dataset]), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -427,12 +479,18 @@ async def test_get_dataset_lineage_with_depth( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -504,11 +562,18 @@ async def test_get_dataset_lineage_with_depth_and_granularity_job( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { 
- "relations": ( - inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB") - + outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB"), + "outputs": outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": {}, + "operations": {}, + }, } @@ -593,15 +658,18 @@ async def test_get_dataset_lineage_with_depth_and_granularity_operation( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + operation_parents_to_json(operations) - + inputs_to_json(inputs, granularity="OPERATION") - + outputs_to_json(outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs) + operations_to_json(operations) - ), + "relations": { + "parents": run_parents_to_json(runs) + operation_parents_to_json(operations), + "symlinks": [], + "inputs": inputs_to_json(inputs, granularity="OPERATION"), + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": operations_to_json(operations), + }, } @@ -636,12 +704,18 @@ async def test_get_dataset_lineage_with_depth_ignore_cycles( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(lineage.inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(lineage.outputs), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + 
"inputs": inputs_to_json(merge_io_by_runs(lineage.inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(lineage.outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -706,12 +780,18 @@ async def test_get_dataset_lineage_with_depth_ignore_unrelated_datasets( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -771,13 +851,18 @@ async def test_get_dataset_lineage_with_symlink( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + symlinks_to_json(dataset_symlinks) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": symlinks_to_json(dataset_symlinks), + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ 
-839,12 +924,18 @@ async def test_get_dataset_lineage_unmergeable_schema_and_output_type( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json([dataset]) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json([dataset]), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -921,10 +1012,16 @@ async def test_get_dataset_lineage_empty_io_stats_and_schema( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merged_inputs, granularity="RUN") - + outputs_to_json(merged_outputs, granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json([dataset]) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merged_inputs, granularity="RUN"), + "outputs": outputs_to_json(merged_outputs, granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json([dataset]), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } diff --git a/tests/test_server/test_lineage/test_get_lineage_request_validators.py b/tests/test_server/test_lineage/test_get_lineage_request_validators.py index 9b24c4f3..15697819 100644 --- a/tests/test_server/test_lineage/test_get_lineage_request_validators.py +++ b/tests/test_server/test_lineage/test_get_lineage_request_validators.py @@ -88,8 +88,18 @@ async def test_get_lineage_missing_id( assert 
response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": [], - "nodes": [], + "relations": { + "parents": [], + "symlinks": [], + "inputs": [], + "outputs": [], + }, + "nodes": { + "datasets": {}, + "jobs": {}, + "runs": {}, + "operations": {}, + }, } diff --git a/tests/test_server/test_lineage/test_job_lineage.py b/tests/test_server/test_lineage/test_job_lineage.py index 164bb921..fc153a4e 100644 --- a/tests/test_server/test_lineage/test_job_lineage.py +++ b/tests/test_server/test_lineage/test_job_lineage.py @@ -54,8 +54,18 @@ async def test_get_job_lineage_no_runs( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": [], - "nodes": jobs_to_json([job]), + "relations": { + "parents": [], + "symlinks": [], + "inputs": [], + "outputs": [], + }, + "nodes": { + "datasets": {}, + "jobs": jobs_to_json([job]), + "runs": {}, + "operations": {}, + }, } @@ -81,8 +91,18 @@ async def test_get_job_lineage_no_operations( assert response.status_code == HTTPStatus.OK, response.json() # runs without operations are excluded assert response.json() == { - "relations": [], - "nodes": jobs_to_json([job]), + "relations": { + "parents": [], + "symlinks": [], + "inputs": [], + "outputs": [], + }, + "nodes": { + "datasets": {}, + "jobs": jobs_to_json([job]), + "runs": {}, + "operations": {}, + }, } @@ -108,8 +128,18 @@ async def test_get_job_lineage_no_inputs_outputs( assert response.status_code == HTTPStatus.OK, response.json() # runs without inputs/outputs are excluded, assert response.json() == { - "relations": [], - "nodes": jobs_to_json([job]), + "relations": { + "parents": [], + "symlinks": [], + "inputs": [], + "outputs": [], + }, + "nodes": { + "datasets": {}, + "jobs": jobs_to_json([job]), + "runs": {}, + "operations": {}, + }, } @@ -147,11 +177,18 @@ async def test_get_job_lineage_simple( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - 
"relations": ( - inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB") - + outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB"), + "outputs": outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": {}, + "operations": {}, + }, } @@ -187,8 +224,18 @@ async def test_get_job_lineage_with_direction_downstream( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": (outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB")), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": [], + "inputs": [], + "outputs": outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": {}, + "operations": {}, + }, } @@ -224,8 +271,18 @@ async def test_get_job_lineage_with_direction_upstream( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": (inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB")), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB"), + "outputs": [], + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": {}, + "operations": {}, + }, } @@ -265,11 +322,18 @@ async def test_get_job_lineage_with_until( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB") - + outputs_to_json(merge_io_by_jobs(outputs), 
granularity="JOB") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB"), + "outputs": outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": {}, + "operations": {}, + }, } @@ -313,12 +377,18 @@ async def test_get_job_lineage_with_granularity_run( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -389,11 +459,18 @@ async def test_get_job_lineage_with_depth( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB") - + outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB"), + "outputs": outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": {}, + "operations": {}, + }, } @@ -471,12 +548,18 @@ async def 
test_get_job_lineage_with_depth_and_granularity_run( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -510,11 +593,18 @@ async def test_get_job_lineage_with_depth_ignore_cycles( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - inputs_to_json(merge_io_by_jobs(lineage.inputs), granularity="JOB") - + outputs_to_json(merge_io_by_jobs(lineage.outputs), granularity="JOB") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_jobs(lineage.inputs), granularity="JOB"), + "outputs": outputs_to_json(merge_io_by_jobs(lineage.outputs), granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": {}, + "operations": {}, + }, } @@ -580,11 +670,18 @@ async def test_get_job_lineage_with_depth_ignore_unrelated_datasets( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB") - + outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": [], + "inputs": 
inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB"), + "outputs": outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": {}, + "operations": {}, + }, } @@ -633,12 +730,18 @@ async def test_get_job_lineage_with_symlinks( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - symlinks_to_json(dataset_symlinks) - + inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB") - + outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": symlinks_to_json(dataset_symlinks), + "inputs": inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB"), + "outputs": outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": {}, + "operations": {}, + }, } @@ -697,11 +800,18 @@ async def test_get_job_lineage_unmergeable_inputs_and_outputs( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB") - + outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_jobs(inputs), granularity="JOB"), + "outputs": outputs_to_json(merge_io_by_jobs(outputs), granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": {}, + "operations": {}, + }, } @@ -775,8 +885,16 @@ async def test_get_job_lineage_empty_io_stats_and_schema( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - inputs_to_json(merged_inputs, 
granularity="JOB") + outputs_to_json(merged_outputs, granularity="JOB") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets)), + "relations": { + "parents": [], + "symlinks": [], + "inputs": inputs_to_json(merged_inputs, granularity="JOB"), + "outputs": outputs_to_json(merged_outputs, granularity="JOB"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": {}, + "operations": {}, + }, } diff --git a/tests/test_server/test_lineage/test_operation_lineage.py b/tests/test_server/test_lineage/test_operation_lineage.py index 75389f9a..b21d6840 100644 --- a/tests/test_server/test_lineage/test_operation_lineage.py +++ b/tests/test_server/test_lineage/test_operation_lineage.py @@ -57,8 +57,18 @@ async def test_get_operation_lineage_no_inputs_outputs( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": (run_parents_to_json([run]) + operation_parents_to_json([operation])), - "nodes": (jobs_to_json([job]) + runs_to_json([run]) + operations_to_json([operation])), + "relations": { + "parents": run_parents_to_json([run]) + operation_parents_to_json([operation]), + "symlinks": [], + "inputs": [], + "outputs": [], + }, + "nodes": { + "datasets": {}, + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": operations_to_json([operation]), + }, } @@ -97,15 +107,18 @@ async def test_get_operation_lineage_simple( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + operation_parents_to_json([operation]) - + inputs_to_json(inputs, granularity="OPERATION") - + outputs_to_json(outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run]) + operations_to_json([operation]) - ), + "relations": { + "parents": run_parents_to_json([run]) + operation_parents_to_json([operation]), + "symlinks": [], + "inputs": 
inputs_to_json(inputs, granularity="OPERATION"), + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": operations_to_json([operation]), + }, } @@ -143,14 +156,18 @@ async def test_get_operation_lineage_with_direction_downstream( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + operation_parents_to_json([operation]) - + outputs_to_json(outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run]) + operations_to_json([operation]) - ), + "relations": { + "parents": run_parents_to_json([run]) + operation_parents_to_json([operation]), + "symlinks": [], + "inputs": [], + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": operations_to_json([operation]), + }, } @@ -188,14 +205,18 @@ async def test_get_operation_lineage_with_direction_upstream( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + operation_parents_to_json([operation]) - + inputs_to_json(inputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run]) + operations_to_json([operation]) - ), + "relations": { + "parents": run_parents_to_json([run]) + operation_parents_to_json([operation]), + "symlinks": [], + "inputs": inputs_to_json(inputs, granularity="OPERATION"), + "outputs": [], + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": operations_to_json([operation]), + }, } @@ -248,15 +269,18 @@ async def 
test_get_operation_lineage_with_until( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + operation_parents_to_json([operation]) - + inputs_to_json(inputs, granularity="OPERATION") - + outputs_to_json(outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run]) + operations_to_json([operation]) - ), + "relations": { + "parents": run_parents_to_json([run]) + operation_parents_to_json([operation]), + "symlinks": [], + "inputs": inputs_to_json(inputs, granularity="OPERATION"), + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": operations_to_json([operation]), + }, } @@ -342,15 +366,18 @@ async def test_get_operation_lineage_with_depth( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + operation_parents_to_json(operations) - + inputs_to_json(inputs, granularity="OPERATION") - + outputs_to_json(outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs) + operations_to_json(operations) - ), + "relations": { + "parents": run_parents_to_json(runs) + operation_parents_to_json(operations), + "symlinks": [], + "inputs": inputs_to_json(inputs, granularity="OPERATION"), + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": operations_to_json(operations), + }, } @@ -385,18 +412,18 @@ async def test_get_operation_lineage_with_depth_ignore_cycles( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + 
operation_parents_to_json(lineage.operations) - + inputs_to_json(lineage.inputs, granularity="OPERATION") - + outputs_to_json(lineage.outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json(jobs) - + datasets_to_json(datasets) - + runs_to_json(runs) - + operations_to_json(lineage.operations) - ), + "relations": { + "parents": run_parents_to_json(runs) + operation_parents_to_json(lineage.operations), + "symlinks": [], + "inputs": inputs_to_json(lineage.inputs, granularity="OPERATION"), + "outputs": outputs_to_json(lineage.outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": operations_to_json(lineage.operations), + }, } @@ -462,18 +489,18 @@ async def test_get_operation_lineage_with_depth_ignore_unrelated_datasets( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + operation_parents_to_json(lineage.operations) - + inputs_to_json(inputs, granularity="OPERATION") - + outputs_to_json(outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json(jobs) - + datasets_to_json(datasets) - + runs_to_json(runs) - + operations_to_json(lineage.operations) - ), + "relations": { + "parents": run_parents_to_json(runs) + operation_parents_to_json(lineage.operations), + "symlinks": [], + "inputs": inputs_to_json(inputs, granularity="OPERATION"), + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": operations_to_json(lineage.operations), + }, } @@ -523,16 +550,18 @@ async def test_get_operation_lineage_with_symlinks( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + operation_parents_to_json([operation]) - + 
symlinks_to_json(dataset_symlinks) - + inputs_to_json(inputs, granularity="OPERATION") - + outputs_to_json(outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run]) + operations_to_json([operation]) - ), + "relations": { + "parents": run_parents_to_json([run]) + operation_parents_to_json([operation]), + "symlinks": symlinks_to_json(dataset_symlinks), + "inputs": inputs_to_json(inputs, granularity="OPERATION"), + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": operations_to_json([operation]), + }, } @@ -593,13 +622,16 @@ async def test_get_operation_lineage_with_empty_io_stats_and_schema( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + operation_parents_to_json([operation]) - + inputs_to_json(inputs, granularity="OPERATION") - + outputs_to_json(outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run]) + operations_to_json([operation]) - ), + "relations": { + "parents": run_parents_to_json([run]) + operation_parents_to_json([operation]), + "symlinks": [], + "inputs": inputs_to_json(inputs, granularity="OPERATION"), + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": operations_to_json([operation]), + }, } diff --git a/tests/test_server/test_lineage/test_run_lineage.py b/tests/test_server/test_lineage/test_run_lineage.py index 487f595f..c1ef9afc 100644 --- a/tests/test_server/test_lineage/test_run_lineage.py +++ b/tests/test_server/test_lineage/test_run_lineage.py @@ -58,8 +58,18 @@ async def test_get_run_lineage_no_operations( assert 
response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": run_parents_to_json([run]), - "nodes": (jobs_to_json([job]) + runs_to_json([run])), + "relations": { + "parents": run_parents_to_json([run]), + "symlinks": [], + "inputs": [], + "outputs": [], + }, + "nodes": { + "datasets": {}, + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": {}, + }, } @@ -85,8 +95,18 @@ async def test_get_run_lineage_no_inputs_outputs( assert response.status_code == HTTPStatus.OK, response.json() # operations without inputs/outputs are excluded assert response.json() == { - "relations": run_parents_to_json([run]), - "nodes": (jobs_to_json([job]) + runs_to_json([run])), + "relations": { + "parents": run_parents_to_json([run]), + "symlinks": [], + "inputs": [], + "outputs": [], + }, + "nodes": { + "datasets": {}, + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": {}, + }, } @@ -125,12 +145,18 @@ async def test_get_run_lineage_simple( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run])), + "relations": { + "parents": run_parents_to_json([run]), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": {}, + }, } @@ -174,15 +200,18 @@ async def test_get_run_lineage_with_granularity_operation( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + 
operation_parents_to_json(operations) - + inputs_to_json(inputs, granularity="OPERATION") - + outputs_to_json(outputs, granularity="OPERATION") - ), - "nodes": ( - jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run]) + operations_to_json(operations) - ), + "relations": { + "parents": run_parents_to_json([run]) + operation_parents_to_json(operations), + "symlinks": [], + "inputs": inputs_to_json(inputs, granularity="OPERATION"), + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": operations_to_json(operations), + }, } @@ -219,8 +248,18 @@ async def test_get_run_lineage_with_direction_downstream( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": (run_parents_to_json([run]) + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN")), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run])), + "relations": { + "parents": run_parents_to_json([run]), + "symlinks": [], + "inputs": [], + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": {}, + }, } @@ -257,8 +296,18 @@ async def test_get_run_lineage_with_direction_upstream( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": (run_parents_to_json([run]) + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN")), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run])), + "relations": { + "parents": run_parents_to_json([run]), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": [], + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": 
runs_to_json([run]), + "operations": {}, + }, } @@ -303,12 +352,18 @@ async def test_get_run_lineage_with_until( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run])), + "relations": { + "parents": run_parents_to_json([run]), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": {}, + }, } @@ -386,12 +441,18 @@ async def test_get_run_lineage_with_depth( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -482,15 +543,18 @@ async def test_get_run_lineage_with_depth_and_granularity_operation( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + operation_parents_to_json(operations) - + inputs_to_json(inputs, granularity="OPERATION") - + outputs_to_json(outputs, 
granularity="OPERATION") - ), - "nodes": ( - jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs) + operations_to_json(operations) - ), + "relations": { + "parents": run_parents_to_json(runs) + operation_parents_to_json(operations), + "symlinks": [], + "inputs": inputs_to_json(inputs, granularity="OPERATION"), + "outputs": outputs_to_json(outputs, granularity="OPERATION"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": operations_to_json(operations), + }, } @@ -524,12 +588,18 @@ async def test_get_run_lineage_with_depth_ignore_cycles( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(lineage.inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(lineage.outputs), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(lineage.inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(lineage.outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -595,12 +665,18 @@ async def test_get_run_lineage_with_depth_ignore_unrelated_datasets( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json(runs) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json(jobs) + datasets_to_json(datasets) + runs_to_json(runs)), + "relations": { + "parents": run_parents_to_json(runs), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + 
"outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json(jobs), + "runs": runs_to_json(runs), + "operations": {}, + }, } @@ -649,13 +725,18 @@ async def test_get_run_lineage_with_symlinks( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + symlinks_to_json(dataset_symlinks) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run])), + "relations": { + "parents": run_parents_to_json([run]), + "symlinks": symlinks_to_json(dataset_symlinks), + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": {}, + }, } @@ -713,12 +794,18 @@ async def test_get_run_lineage_unmergeable_inputs_and_outputs( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + inputs_to_json(merge_io_by_runs(inputs), granularity="RUN") - + outputs_to_json(merge_io_by_runs(outputs), granularity="RUN") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run])), + "relations": { + "parents": run_parents_to_json([run]), + "symlinks": [], + "inputs": inputs_to_json(merge_io_by_runs(inputs), granularity="RUN"), + "outputs": outputs_to_json(merge_io_by_runs(outputs), granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": {}, + }, } @@ -791,10 +878,16 @@ async def test_get_run_lineage_empty_io_stats_and_schema( assert 
response.status_code == HTTPStatus.OK, response.json() assert response.json() == { - "relations": ( - run_parents_to_json([run]) - + inputs_to_json(merged_inputs, granularity="RUN") - + outputs_to_json(merged_outputs, granularity="RUN") - ), - "nodes": (jobs_to_json([job]) + datasets_to_json(datasets) + runs_to_json([run])), + "relations": { + "parents": run_parents_to_json([run]), + "symlinks": [], + "inputs": inputs_to_json(merged_inputs, granularity="RUN"), + "outputs": outputs_to_json(merged_outputs, granularity="RUN"), + }, + "nodes": { + "datasets": datasets_to_json(datasets), + "jobs": jobs_to_json([job]), + "runs": runs_to_json([run]), + "operations": {}, + }, } diff --git a/tests/test_server/test_locations/test_patch_locations.py b/tests/test_server/test_locations/test_patch_locations.py index 27c7cc45..60a3ee9c 100644 --- a/tests/test_server/test_locations/test_patch_locations.py +++ b/tests/test_server/test_locations/test_patch_locations.py @@ -6,6 +6,7 @@ from data_rentgen.db.models import Location from tests.fixtures.mocks import MockedUser +from tests.test_server.utils.convert_to_json import location_to_json from tests.test_server.utils.enrich import enrich_locations pytestmark = [pytest.mark.server, pytest.mark.asyncio] @@ -37,10 +38,7 @@ async def test_set_location_external_id( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { "data": { - "id": location.id, - "name": location.name, - "type": location.type, - "addresses": [{"url": address.url} for address in location.addresses], + **location_to_json(location), "external_id": "external_id", }, "statistics": { @@ -70,10 +68,7 @@ async def test_change_location_external_id( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { "data": { - "id": location.id, - "name": location.name, - "type": location.type, - "addresses": [{"url": address.url} for address in location.addresses], + **location_to_json(location), "external_id": 
"new_external_id", }, "statistics": { @@ -103,10 +98,7 @@ async def test_reset_location_external_id( assert response.status_code == HTTPStatus.OK, response.json() assert response.json() == { "data": { - "id": location.id, - "name": location.name, - "type": location.type, - "addresses": [{"url": address.url} for address in location.addresses], + **location_to_json(location), "external_id": None, }, "statistics": { diff --git a/tests/test_server/utils/convert_to_json.py b/tests/test_server/utils/convert_to_json.py index 031e2be8..ad5311d0 100644 --- a/tests/test_server/utils/convert_to_json.py +++ b/tests/test_server/utils/convert_to_json.py @@ -24,8 +24,7 @@ def format_datetime(value: datetime): def run_parent_to_json(run: Run): return { - "kind": "PARENT", - "from": {"kind": "JOB", "id": run.job_id}, + "from": {"kind": "JOB", "id": str(run.job_id)}, "to": {"kind": "RUN", "id": str(run.id)}, } @@ -36,7 +35,6 @@ def run_parents_to_json(runs: list[Run]): def operation_parent_to_json(operation: Operation): return { - "kind": "PARENT", "from": {"kind": "RUN", "id": str(operation.run_id)}, "to": {"kind": "OPERATION", "id": str(operation.id)}, } @@ -48,9 +46,8 @@ def operation_parents_to_json(operations: list[Operation]): def symlink_to_json(symlink: DatasetSymlink): return { - "kind": "SYMLINK", - "from": {"kind": "DATASET", "id": symlink.from_dataset_id}, - "to": {"kind": "DATASET", "id": symlink.to_dataset_id}, + "from": {"kind": "DATASET", "id": str(symlink.from_dataset_id)}, + "to": {"kind": "DATASET", "id": str(symlink.to_dataset_id)}, "type": symlink.type.value, } @@ -61,7 +58,7 @@ def symlinks_to_json(symlinks: list[DatasetSymlink]): def schema_to_json(schema: Schema): return { - "id": schema.id, + "id": str(schema.id), "fields": [ { "description": None, @@ -79,11 +76,10 @@ def input_to_json(input: Input, granularity: Literal["OPERATION", "RUN", "JOB"]) elif granularity == "RUN": to = {"kind": "RUN", "id": str(input.run_id)} else: - to = {"kind": "JOB", "id": 
input.job_id} + to = {"kind": "JOB", "id": str(input.job_id)} return { - "kind": "INPUT", - "from": {"kind": "DATASET", "id": input.dataset_id}, + "from": {"kind": "DATASET", "id": str(input.dataset_id)}, "to": to, "num_bytes": input.num_bytes, "num_rows": input.num_rows, @@ -106,12 +102,11 @@ def output_to_json(output: Output, granularity: Literal["OPERATION", "RUN", "JOB elif granularity == "RUN": from_ = {"kind": "RUN", "id": str(output.run_id)} else: - from_ = {"kind": "JOB", "id": output.job_id} + from_ = {"kind": "JOB", "id": str(output.job_id)} return { - "kind": "OUTPUT", "from": from_, - "to": {"kind": "DATASET", "id": output.dataset_id}, + "to": {"kind": "DATASET", "id": str(output.dataset_id)}, "type": output.type.value if output.type else None, "num_bytes": output.num_bytes, "num_rows": output.num_rows, @@ -134,7 +129,7 @@ def address_to_json(address: Address): def location_to_json(location: Location): return { - "id": location.id, + "id": str(location.id), "name": location.name, "type": location.type, "addresses": [address_to_json(address) for address in location.addresses], @@ -143,13 +138,12 @@ def location_to_json(location: Location): def locations_to_json(locations: list[Location]): - return [location_to_json(location) for location in sorted(locations, key=lambda x: x.id)] + return {str(location.id): location_to_json(location) for location in locations} def dataset_to_json(dataset: Dataset): return { - "kind": "DATASET", - "id": dataset.id, + "id": str(dataset.id), "format": dataset.format, "name": dataset.name, "location": location_to_json(dataset.location), @@ -157,13 +151,12 @@ def dataset_to_json(dataset: Dataset): def datasets_to_json(datasets: list[Dataset]): - return [dataset_to_json(dataset) for dataset in sorted(datasets, key=lambda x: x.id)] + return {str(dataset.id): dataset_to_json(dataset) for dataset in datasets} def job_to_json(job: Job): return { - "kind": "JOB", - "id": job.id, + "id": str(job.id), "name": job.name, "type": 
job.type.value, "location": location_to_json(job.location), @@ -171,7 +164,7 @@ def job_to_json(job: Job): def jobs_to_json(jobs: list[Job]): - return [job_to_json(job) for job in sorted(jobs, key=lambda x: x.id)] + return {str(job.id): job_to_json(job) for job in jobs} def user_to_json(user: User): @@ -180,9 +173,8 @@ def user_to_json(user: User): def run_to_json(run: Run): return { - "kind": "RUN", "id": str(run.id), - "job_id": run.job_id, + "job_id": str(run.job_id), "created_at": format_datetime(run.created_at), "parent_run_id": str(run.parent_run_id), "status": run.status.name, @@ -199,12 +191,11 @@ def run_to_json(run: Run): def runs_to_json(runs: list[Run]): - return [run_to_json(run) for run in sorted(runs, key=lambda x: x.id)] + return {str(run.id): run_to_json(run) for run in runs} def operation_to_json(operation: Operation): return { - "kind": "OPERATION", "id": str(operation.id), "created_at": format_datetime(operation.created_at), "run_id": str(operation.run_id), @@ -220,4 +211,4 @@ def operation_to_json(operation: Operation): def operations_to_json(operations: list[Operation]): - return [operation_to_json(operation) for operation in sorted(operations, key=lambda x: x.id)] + return {str(operation.id): operation_to_json(operation) for operation in operations}