diff --git a/.coveragerc b/.coveragerc index 48e1e747..d93d55b5 100644 --- a/.coveragerc +++ b/.coveragerc @@ -3,7 +3,6 @@ concurrency = greenlet branch = True omit = - tests/* examples/* docs/* # omit anything in a .local directory anywhere diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 82c6cb9b..a9339084 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -31,19 +31,18 @@ jobs: - "3.9" - "3.10" - "3.11" + - "3.12" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - name: Install poetry - run: | - python -m pip install --upgrade pip poetry==1.8.2 pre-commit - poetry config virtualenvs.create false --local + - name: Install build dependencies + run: python -m pip install --upgrade pip pre-commit hatch - name: Install dependencies - run: poetry install --all-extras + run: hatch env create - name: Lint code run: pre-commit run --all-files @@ -56,6 +55,7 @@ jobs: - "3.9" - "3.10" - "3.11" + - "3.12" db-url: - "sqlite+aiosqlite:///./db.sqlite3" - "postgresql+asyncpg://user:passwd@localhost:5432/app" @@ -81,29 +81,25 @@ jobs: # Maps tcp port 5432 on service container to the host - 5432:5432 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - name: Install poetry - run: | - python -m pip install --upgrade pip poetry==1.8.2 pre-commit - poetry config virtualenvs.create false --local - - name: Install dependencies - run: poetry install --all-extras - - name: Test with pytest - run: | - flags="-s -vv --cov=fastapi_jsonapi --cov-config .coveragerc --cov-report=xml" - pytest $flags tests/ + - name: ♿ Install dependencies + run: python -m pip install 
--upgrade pip build "hatch==1.12.0" + - name: 🔨 Build package + run: python -m build + - name: Test with pytest and coverage through hatch + run: hatch run test.py${{ matrix.python-version }}:cov-xml -v env: TESTING_DB_URL: ${{ matrix.db-url }} - name: Upload coverage data to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: token: ${{ secrets.CODECOV_TOKEN }} - files: coverage.xml + files: coverage/${{ matrix.python-version }}/coverage.xml flags: unittests name: py-${{ matrix.python-version }}-db-${{ startsWith(matrix.db-url, 'sqlite') && 'sqlite' || startsWith(matrix.db-url, 'postgres') && 'postgres' || 'unknown' }} fail_ci_if_error: true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f6090575..97ebad50 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,21 +1,27 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: "v3.2.0" + rev: "v4.6.0" hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml + - id: check-toml + - id: check-json - id: check-added-large-files - id: mixed-line-ending - - id: requirements-txt-fixer + - id: check-case-conflict - - repo: https://github.com/psf/black - rev: "23.3.0" + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.4.8 hooks: - - id: black + # Run the linter. + - id: ruff + args: + - "--fix" + # Run the formatter. 
+ - id: ruff-format - - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.1.8" + - repo: https://github.com/psf/black + rev: "24.4.2" hooks: - - id: ruff - args: [--fix, --exit-non-zero-on-fix, --unsafe-fixes] + - id: black diff --git a/codecov.yaml b/codecov.yaml index fcde1e46..3b5e6cd3 100644 --- a/codecov.yaml +++ b/codecov.yaml @@ -21,4 +21,5 @@ comment: require_changes: no branches: - main - after_n_builds: 6 # 3 python versions by 2 dbs + - dev-3.x + after_n_builds: 8 # 4 python versions by 2 dbs diff --git a/docs/http_snippets/update_snippets_with_responses.py b/docs/http_snippets/update_snippets_with_responses.py index 483f5f16..53f86e25 100644 --- a/docs/http_snippets/update_snippets_with_responses.py +++ b/docs/http_snippets/update_snippets_with_responses.py @@ -4,7 +4,7 @@ from http import HTTPStatus import requests -import simplejson +import json import argparse parser = argparse.ArgumentParser() @@ -83,7 +83,7 @@ def run_request_for_module(module_name: str): if response.content: # TODO: handle non-json response? 
http_response_text.append( - simplejson.dumps( + json.dumps( response.json(), sort_keys=SORT_KEYS_ON_DUMP, indent=2, diff --git a/docs/python_snippets/client_generated_id/schematic_example.py b/docs/python_snippets/client_generated_id/schematic_example.py index 7c88acb3..905abd80 100644 --- a/docs/python_snippets/client_generated_id/schematic_example.py +++ b/docs/python_snippets/client_generated_id/schematic_example.py @@ -1,9 +1,14 @@ import sys from pathlib import Path -from typing import ClassVar +from typing import ( + ClassVar, + Annotated, +) import uvicorn from fastapi import APIRouter, Depends, FastAPI + +from fastapi_jsonapi.types_metadata import ClientCanSetId from pydantic import ConfigDict from fastapi_jsonapi.schema_base import Field, BaseModel as PydanticBaseModel from sqlalchemy import Column, Integer, Text @@ -52,7 +57,7 @@ class UserPatchSchema(UserAttributesBaseSchema): class UserInSchema(UserAttributesBaseSchema): """User input schema.""" - id: int = Field(json_schema_extra={"client_can_set_id": True}) + id: Annotated[int, ClientCanSetId()] async def get_session(): diff --git a/docs/python_snippets/relationships/models.py b/docs/python_snippets/relationships/models.py index 39c3351c..93908c99 100644 --- a/docs/python_snippets/relationships/models.py +++ b/docs/python_snippets/relationships/models.py @@ -2,10 +2,12 @@ from sqlalchemy.orm import relationship from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from examples.api_for_sqlalchemy.utils.sqlalchemy.timestamps_mixin import TimestampsMixin +# TODO: sqla 2.0 -class User(Base, BaseModelMixin): + +class User(Base, TimestampsMixin): __tablename__ = "users" id = Column(Integer, primary_key=True, autoincrement=True) name: str = Column(String) @@ -15,7 +17,7 @@ class User(Base, BaseModelMixin): computers = relationship("Computer", back_populates="user", uselist=True) -class Computer(Base, 
BaseModelMixin): +class Computer(Base, TimestampsMixin): __tablename__ = "computers" id = Column(Integer, primary_key=True, autoincrement=True) @@ -24,7 +26,7 @@ class Computer(Base, BaseModelMixin): user = relationship("User", back_populates="computers") -class UserBio(Base, BaseModelMixin): +class UserBio(Base, TimestampsMixin): __tablename__ = "user_bio" id = Column(Integer, primary_key=True, autoincrement=True) birth_city: str = Column(String, nullable=False, default="", server_default="") diff --git a/docs/python_snippets/relationships/relationships_info_example.py b/docs/python_snippets/relationships/relationships_info_example.py index d254bc70..4a19a57f 100644 --- a/docs/python_snippets/relationships/relationships_info_example.py +++ b/docs/python_snippets/relationships/relationships_info_example.py @@ -1,8 +1,10 @@ -from typing import Optional +from __future__ import annotations + +from typing import Annotated from pydantic import BaseModel as PydanticBaseModel, ConfigDict -from fastapi_jsonapi.schema_base import Field, RelationshipInfo +from fastapi_jsonapi.types_metadata import RelationshipInfo class BaseModel(PydanticBaseModel): @@ -12,21 +14,19 @@ class BaseModel(PydanticBaseModel): class UserBaseSchema(BaseModel): id: int name: str - bio: Optional["UserBioSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user_bio", - ), - }, - ) - computers: Optional["ComputerSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="computer", - many=True, - ), - }, - ) + bio: Annotated[ + UserBioSchema | None, + RelationshipInfo( + resource_type="user_bio", + ), + ] + computers: Annotated[ + ComputerSchema | None, + RelationshipInfo( + resource_type="computer", + many=True, + ), + ] class UserSchema(BaseModel): @@ -34,27 +34,25 @@ class UserSchema(BaseModel): name: str -class UserBioBaseSchema(BaseModel): +class UserBioSchema(BaseModel): birth_city: str favourite_movies: str # 
keys_to_ids_list: Optional[dict[str, list[int]]] = None - user: "UserSchema" = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user", - ), - }, - ) + user: Annotated[ + UserSchema | None, + RelationshipInfo( + resource_type="user", + ), + ] -class ComputerBaseSchema(BaseModel): +class ComputerSchema(BaseModel): id: int name: str - user: Optional["UserSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user", - ), - }, - ) + user: Annotated[ + UserSchema | None, + RelationshipInfo( + resource_type="user", + ), + ] diff --git a/docs/requirements.txt b/docs/requirements.txt index c13c65a9..75155be4 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,7 @@ -fastapi<0.100.0 -pydantic<2 -simplejson>=3.17.6 +fastapi>=0.100.0 +pydantic>=2 +orjson>=3.10.7 sphinx sphinx_rtd_theme -sqlalchemy<2 +sqlalchemy>=2 tortoise-orm>=0.19.3 diff --git a/examples/api_for_sqlalchemy/api/views_base.py b/examples/api_for_sqlalchemy/api/views_base.py index c2ccd97c..03693974 100644 --- a/examples/api_for_sqlalchemy/api/views_base.py +++ b/examples/api_for_sqlalchemy/api/views_base.py @@ -1,4 +1,4 @@ -from typing import ClassVar, Dict +from typing import ClassVar from fastapi import Depends from pydantic import BaseModel, ConfigDict @@ -17,7 +17,7 @@ class SessionDependency(BaseModel): model_config = ConfigDict(arbitrary_types_allowed=True) -def handler(view: ViewBase, dto: SessionDependency) -> Dict: +def handler(view: ViewBase, dto: SessionDependency) -> dict: return {"session": dto.session} diff --git a/examples/api_for_sqlalchemy/asgi.py b/examples/api_for_sqlalchemy/asgi.py index 95967617..0e665931 100644 --- a/examples/api_for_sqlalchemy/asgi.py +++ b/examples/api_for_sqlalchemy/asgi.py @@ -2,4 +2,6 @@ from examples.api_for_sqlalchemy.main import create_app -app = create_app() +app = create_app( + create_custom_static_urls=True, +) diff --git 
a/examples/api_for_sqlalchemy/extensions/sqlalchemy.py b/examples/api_for_sqlalchemy/extensions/sqlalchemy.py index d57f305f..0b965e16 100644 --- a/examples/api_for_sqlalchemy/extensions/sqlalchemy.py +++ b/examples/api_for_sqlalchemy/extensions/sqlalchemy.py @@ -1,18 +1,22 @@ from sqlalchemy.engine import make_url from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import DeclarativeBase, sessionmaker +from sqlalchemy.orm import ( + DeclarativeBase, + Mapped, + mapped_column, + sessionmaker, +) from examples.api_for_sqlalchemy import config class Base(DeclarativeBase): - pass + id: Mapped[int] = mapped_column(primary_key=True) def async_session() -> sessionmaker: engine = create_async_engine(url=make_url(config.SQLA_URI), echo=config.SQLA_ECHO) - _async_session = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) - return _async_session + return sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) class Connector: diff --git a/examples/api_for_sqlalchemy/main.py b/examples/api_for_sqlalchemy/main.py index 92256676..84e04683 100644 --- a/examples/api_for_sqlalchemy/main.py +++ b/examples/api_for_sqlalchemy/main.py @@ -1,19 +1,17 @@ -""" -Main module for w_mount service. - -In module placed db initialization functions, app factory. 
-""" import sys +from contextlib import asynccontextmanager from pathlib import Path import uvicorn from fastapi import FastAPI +from fastapi.responses import ORJSONResponse from sqlalchemy.engine import make_url from sqlalchemy.ext.asyncio import create_async_engine from examples.api_for_sqlalchemy import config from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base from examples.api_for_sqlalchemy.urls import add_routes +from examples.api_for_sqlalchemy.util import register_static_docs_routes from fastapi_jsonapi import init CURRENT_FILE = Path(__file__).resolve() @@ -31,21 +29,31 @@ async def sqlalchemy_init() -> None: await conn.run_sync(Base.metadata.create_all) -def create_app() -> FastAPI: - """ - Create app factory. +@asynccontextmanager +async def lifespan(app: FastAPI): + # startup + await sqlalchemy_init() + yield + # shutdown + # await db_helper.dispose() + - :return: app - """ +def create_app( + *, + create_custom_static_urls: bool = False, +) -> FastAPI: app = FastAPI( title="FastAPI and SQLAlchemy", - debug=True, - openapi_url="/openapi.json", - docs_url="/docs", + default_response_class=ORJSONResponse, + lifespan=lifespan, + docs_url=None if create_custom_static_urls else "/docs", + redoc_url=None if create_custom_static_urls else "/redoc", ) + if create_custom_static_urls: + register_static_docs_routes(app) + app.config = {"MAX_INCLUDE_DEPTH": 5} add_routes(app) - app.on_event("startup")(sqlalchemy_init) init(app) return app @@ -53,7 +61,7 @@ def create_app() -> FastAPI: if __name__ == "__main__": uvicorn.run( "asgi:app", - host="0.0.0.0", + host="0.0.0.0", # noqa: S104 port=8082, reload=True, app_dir=str(CURRENT_DIR), diff --git a/examples/api_for_sqlalchemy/models/child.py b/examples/api_for_sqlalchemy/models/child.py index cc3bafdf..75e475ae 100644 --- a/examples/api_for_sqlalchemy/models/child.py +++ b/examples/api_for_sqlalchemy/models/child.py @@ -1,14 +1,17 @@ +from typing import TYPE_CHECKING + from sqlalchemy.orm import Mapped, 
mapped_column, relationship from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.models import ParentToChildAssociation -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from examples.api_for_sqlalchemy.utils.sqlalchemy.timestamps_mixin import TimestampsMixin + +if TYPE_CHECKING: + from examples.api_for_sqlalchemy.models import ParentToChildAssociation -class Child(Base, BaseModelMixin): +class Child(Base, TimestampsMixin): __tablename__ = "right_table_children" - id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) name: Mapped[str] = mapped_column(nullable=False) parents: Mapped["ParentToChildAssociation"] = relationship( "ParentToChildAssociation", diff --git a/examples/api_for_sqlalchemy/models/computer.py b/examples/api_for_sqlalchemy/models/computer.py index 44ecdab3..67bee17f 100644 --- a/examples/api_for_sqlalchemy/models/computer.py +++ b/examples/api_for_sqlalchemy/models/computer.py @@ -1,17 +1,27 @@ -from sqlalchemy import Column, ForeignKey, Integer, String -from sqlalchemy.orm import relationship +from __future__ import annotations + +from typing import TYPE_CHECKING + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import ( + Mapped, + mapped_column, + relationship, +) from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from examples.api_for_sqlalchemy.utils.sqlalchemy.timestamps_mixin import TimestampsMixin + +if TYPE_CHECKING: + from .user import User -class Computer(Base, BaseModelMixin): +class Computer(Base, TimestampsMixin): __tablename__ = "computers" - id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(String, nullable=False) - user_id = Column(Integer, ForeignKey("users.id"), nullable=True) - user = relationship("User", back_populates="computers") + name: Mapped[str] + user_id: Mapped[int | None] = 
mapped_column(ForeignKey("users.id")) + user: Mapped[User] = relationship(back_populates="computers") def __repr__(self): return f"{self.__class__.__name__}(id={self.id}, name={self.name!r}, user_id={self.user_id})" diff --git a/examples/api_for_sqlalchemy/models/enums.py b/examples/api_for_sqlalchemy/models/enums.py deleted file mode 100644 index c37423fc..00000000 --- a/examples/api_for_sqlalchemy/models/enums.py +++ /dev/null @@ -1,11 +0,0 @@ -from fastapi_jsonapi.data_layers.fields.enum import Enum - - -class UserStatusEnum(str, Enum): - """ - Status user. - """ - - active = "active" - archive = "archive" - block = "block" diff --git a/examples/api_for_sqlalchemy/models/parent.py b/examples/api_for_sqlalchemy/models/parent.py index 81fdc3da..2536ec77 100644 --- a/examples/api_for_sqlalchemy/models/parent.py +++ b/examples/api_for_sqlalchemy/models/parent.py @@ -1,16 +1,21 @@ -from sqlalchemy import Column, Integer, String -from sqlalchemy.orm import relationship +from typing import TYPE_CHECKING + +from sqlalchemy.orm import ( + Mapped, + relationship, +) from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from examples.api_for_sqlalchemy.utils.sqlalchemy.timestamps_mixin import TimestampsMixin + +if TYPE_CHECKING: + from examples.api_for_sqlalchemy.models import ParentToChildAssociation -class Parent(Base, BaseModelMixin): +class Parent(Base, TimestampsMixin): __tablename__ = "left_table_parents" - id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(String, nullable=False) - children = relationship( - "ParentToChildAssociation", + name: Mapped[str] + children: Mapped[list["ParentToChildAssociation"]] = relationship( back_populates="parent", ) diff --git a/examples/api_for_sqlalchemy/models/parent_child_association.py b/examples/api_for_sqlalchemy/models/parent_child_association.py index 85c48ea7..9f6bbcf7 100644 --- 
a/examples/api_for_sqlalchemy/models/parent_child_association.py +++ b/examples/api_for_sqlalchemy/models/parent_child_association.py @@ -1,11 +1,21 @@ -from sqlalchemy import Column, ForeignKey, Index, Integer, String -from sqlalchemy.orm import relationship +from typing import TYPE_CHECKING + +from sqlalchemy import ForeignKey, Index, String +from sqlalchemy.orm import ( + Mapped, + mapped_column, + relationship, +) from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from examples.api_for_sqlalchemy.utils.sqlalchemy.timestamps_mixin import TimestampsMixin + +if TYPE_CHECKING: + from .child import Child + from .parent import Parent -class ParentToChildAssociation(Base, BaseModelMixin): +class ParentToChildAssociation(Base, TimestampsMixin): __table_args__ = ( # JSON:API requires `id` field on any model, # so we can't create a composite PK here @@ -20,24 +30,18 @@ class ParentToChildAssociation(Base, BaseModelMixin): __tablename__ = "parent_to_child_association_table" - id = Column(Integer, primary_key=True, autoincrement=True) - - parent_left_id = Column( + parent_left_id: Mapped[int] = mapped_column( ForeignKey("left_table_parents.id"), - nullable=False, ) - child_right_id = Column( + child_right_id: Mapped[int] = mapped_column( ForeignKey("right_table_children.id"), - nullable=False, ) - extra_data = Column(String(50)) - parent = relationship( - "Parent", + extra_data: Mapped[str] = mapped_column(String(50)) + parent: Mapped["Parent"] = relationship( back_populates="children", # primaryjoin="ParentToChildAssociation.parent_left_id == Parent.id", ) - child = relationship( - "Child", + child: Mapped["Child"] = relationship( back_populates="parents", # primaryjoin="ParentToChildAssociation.child_right_id == Child.id", ) diff --git a/examples/api_for_sqlalchemy/models/post.py b/examples/api_for_sqlalchemy/models/post.py index 7295beb3..601023e3 100644 --- 
a/examples/api_for_sqlalchemy/models/post.py +++ b/examples/api_for_sqlalchemy/models/post.py @@ -1,22 +1,36 @@ """Post model.""" -from sqlalchemy import Column, ForeignKey, Integer, String, Text -from sqlalchemy.orm import relationship +from typing import TYPE_CHECKING + +from sqlalchemy import ForeignKey, Text +from sqlalchemy.orm import ( + Mapped, + mapped_column, + relationship, +) from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from examples.api_for_sqlalchemy.utils.sqlalchemy.timestamps_mixin import TimestampsMixin + +if TYPE_CHECKING: + from .post_comment import PostComment + from .user import User -class Post(Base, BaseModelMixin): +class Post(Base, TimestampsMixin): __tablename__ = "posts" - id = Column(Integer, primary_key=True, autoincrement=True) - title = Column(String, nullable=False) - body = Column(Text, nullable=False, default="", server_default="") - user_id = Column(Integer, ForeignKey("users.id"), nullable=False, unique=False) - user = relationship("User", back_populates="posts", uselist=False) + title: Mapped[str] + body: Mapped[str] = mapped_column(Text, nullable=False, default="", server_default="") + + user_id: Mapped[int] = mapped_column(ForeignKey("users.id")) + user: Mapped["User"] = relationship(back_populates="posts", uselist=False) - comments = relationship("PostComment", back_populates="post", uselist=True) + comments: Mapped[list["PostComment"]] = relationship( + "PostComment", + back_populates="post", + uselist=True, + ) def __repr__(self): return f"{self.__class__.__name__}(id={self.id} title={self.title!r} user_id={self.user_id})" diff --git a/examples/api_for_sqlalchemy/models/post_comment.py b/examples/api_for_sqlalchemy/models/post_comment.py index ca509ae2..1563dd68 100644 --- a/examples/api_for_sqlalchemy/models/post_comment.py +++ b/examples/api_for_sqlalchemy/models/post_comment.py @@ -1,22 +1,32 @@ """Post Comment 
model.""" -from sqlalchemy import Column, ForeignKey, Integer, String -from sqlalchemy.orm import relationship +from typing import TYPE_CHECKING + +from sqlalchemy import ForeignKey, Integer, String +from sqlalchemy.orm import ( + Mapped, + mapped_column, + relationship, +) from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from examples.api_for_sqlalchemy.utils.sqlalchemy.timestamps_mixin import TimestampsMixin + +if TYPE_CHECKING: + from .post import Post + from .user import User -class PostComment(Base, BaseModelMixin): +class PostComment(Base, TimestampsMixin): __tablename__ = "post_comments" - id = Column(Integer, primary_key=True, autoincrement=True) - text: str = Column(String, nullable=False, default="", server_default="") - post_id = Column(Integer, ForeignKey("posts.id"), nullable=False, unique=False) - post = relationship("Post", back_populates="comments", uselist=False) + text: Mapped[str] = mapped_column(String, nullable=False, default="", server_default="") + + post_id: Mapped[int] = mapped_column(Integer, ForeignKey("posts.id"), nullable=False, unique=False) + post: Mapped["Post"] = relationship(back_populates="comments", uselist=False) - author_id = Column(Integer, ForeignKey("users.id"), nullable=False, unique=False) - author = relationship("User", back_populates="comments", uselist=False) + author_id: Mapped[int] = mapped_column(ForeignKey("users.id"), nullable=False, unique=False) + author: Mapped["User"] = relationship(back_populates="comments", uselist=False) def __repr__(self): return ( diff --git a/examples/api_for_sqlalchemy/models/schemas/__init__.py b/examples/api_for_sqlalchemy/models/schemas/__init__.py index a04a683b..1cc6ded2 100644 --- a/examples/api_for_sqlalchemy/models/schemas/__init__.py +++ b/examples/api_for_sqlalchemy/models/schemas/__init__.py @@ -1,6 +1,3 @@ -"""schemas package.""" - - from .child import ( ChildInSchema, 
ChildPatchSchema, diff --git a/examples/api_for_sqlalchemy/models/schemas/child.py b/examples/api_for_sqlalchemy/models/schemas/child.py index f8bd955e..f2181dc2 100644 --- a/examples/api_for_sqlalchemy/models/schemas/child.py +++ b/examples/api_for_sqlalchemy/models/schemas/child.py @@ -1,8 +1,16 @@ -from typing import TYPE_CHECKING, List +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) from pydantic import ConfigDict -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo if TYPE_CHECKING: from .parent_child_association import ParentToChildAssociationSchema @@ -14,15 +22,14 @@ class ChildBaseSchema(BaseModel): model_config = ConfigDict(from_attributes=True) name: str - parents: List["ParentToChildAssociationSchema"] = Field( - default=None, - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="parent_child_association", - many=True, - ), - }, - ) + + parents: Annotated[ + list[ParentToChildAssociationSchema] | None, + RelationshipInfo( + resource_type="parent_child_association", + many=True, + ), + ] = None class ChildPatchSchema(ChildBaseSchema): diff --git a/examples/api_for_sqlalchemy/models/schemas/computer.py b/examples/api_for_sqlalchemy/models/schemas/computer.py index 84387643..e9b56e27 100644 --- a/examples/api_for_sqlalchemy/models/schemas/computer.py +++ b/examples/api_for_sqlalchemy/models/schemas/computer.py @@ -1,10 +1,14 @@ -"""Computer schemas module.""" +from __future__ import annotations -from typing import TYPE_CHECKING, Optional +from typing import ( + TYPE_CHECKING, + Annotated, +) from pydantic import ConfigDict -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo if TYPE_CHECKING: from .user import 
UserSchema @@ -16,13 +20,12 @@ class ComputerBaseSchema(BaseModel): model_config = ConfigDict(from_attributes=True) name: str - user: Optional["UserSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user", - ), - }, - ) + user: Annotated[ + UserSchema | None, + RelationshipInfo( + resource_type="user", + ), + ] = None class ComputerPatchSchema(ComputerBaseSchema): diff --git a/examples/api_for_sqlalchemy/models/schemas/parent.py b/examples/api_for_sqlalchemy/models/schemas/parent.py index 74a923c4..ec8444af 100644 --- a/examples/api_for_sqlalchemy/models/schemas/parent.py +++ b/examples/api_for_sqlalchemy/models/schemas/parent.py @@ -1,8 +1,16 @@ -from typing import TYPE_CHECKING, List +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) from pydantic import ConfigDict -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo if TYPE_CHECKING: from .parent_child_association import ParentToChildAssociationSchema @@ -15,13 +23,13 @@ class ParentBaseSchema(BaseModel): name: str - children: List["ParentToChildAssociationSchema"] = Field( - default=None, - relationship=RelationshipInfo( + children: Annotated[ + list[ParentToChildAssociationSchema] | None, + RelationshipInfo( resource_type="parent_child_association", many=True, ), - ) + ] = None class ParentPatchSchema(ParentBaseSchema): diff --git a/examples/api_for_sqlalchemy/models/schemas/parent_child_association.py b/examples/api_for_sqlalchemy/models/schemas/parent_child_association.py index 3062a710..b9e4e0cf 100644 --- a/examples/api_for_sqlalchemy/models/schemas/parent_child_association.py +++ b/examples/api_for_sqlalchemy/models/schemas/parent_child_association.py @@ -1,6 +1,14 @@ -from typing import TYPE_CHECKING +from __future__ import annotations -from fastapi_jsonapi.schema_base import 
BaseModel, Field, RelationshipInfo +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo if TYPE_CHECKING: from .child import ChildSchema @@ -11,16 +19,16 @@ class ParentToChildAssociationSchema(BaseModel): id: int extra_data: str - parent: "ParentSchema" = Field( - default=None, - relationship=RelationshipInfo( + parent: Annotated[ + ParentSchema | None, + RelationshipInfo( resource_type="parent", ), - ) + ] = None - child: "ChildSchema" = Field( - default=None, - relationship=RelationshipInfo( + child: Annotated[ + ChildSchema | None, + RelationshipInfo( resource_type="child", ), - ) + ] = None diff --git a/examples/api_for_sqlalchemy/models/schemas/post.py b/examples/api_for_sqlalchemy/models/schemas/post.py index 2320551b..d9972fc4 100644 --- a/examples/api_for_sqlalchemy/models/schemas/post.py +++ b/examples/api_for_sqlalchemy/models/schemas/post.py @@ -1,11 +1,18 @@ -"""Post schemas module.""" +from __future__ import annotations from datetime import datetime -from typing import TYPE_CHECKING, List +from typing import ( + TYPE_CHECKING, + Annotated, +) from pydantic import ConfigDict -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo +from fastapi_jsonapi.schema_base import ( + BaseModel, + Field, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo if TYPE_CHECKING: from .post_comment import PostCommentSchema @@ -20,18 +27,20 @@ class PostBaseSchema(BaseModel): title: str body: str - user: "UserSchema" = Field( - relationship=RelationshipInfo( + user: Annotated[ + UserSchema | None, + RelationshipInfo( resource_type="user", ), - ) + ] = None - comments: List["PostCommentSchema"] = Field( - relationship=RelationshipInfo( + comments: Annotated[ + list[PostCommentSchema] | None, + RelationshipInfo( resource_type="post_comment", many=True, ), - ) + ] = None class PostPatchSchema(PostBaseSchema): diff --git 
a/examples/api_for_sqlalchemy/models/schemas/post_comment.py b/examples/api_for_sqlalchemy/models/schemas/post_comment.py index 3f01679f..1a396876 100644 --- a/examples/api_for_sqlalchemy/models/schemas/post_comment.py +++ b/examples/api_for_sqlalchemy/models/schemas/post_comment.py @@ -1,10 +1,18 @@ -"""Post Comment schemas module.""" +from __future__ import annotations + from datetime import datetime -from typing import TYPE_CHECKING +from typing import ( + TYPE_CHECKING, + Annotated, +) from pydantic import ConfigDict -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo +from fastapi_jsonapi.schema_base import ( + BaseModel, + Field, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo if TYPE_CHECKING: from .post import PostSchema @@ -20,16 +28,18 @@ class PostCommentBaseSchema(BaseModel): created_at: datetime = Field(description="Create datetime") modified_at: datetime = Field(description="Update datetime") - post: "PostSchema" = Field( - relationship=RelationshipInfo( + post: Annotated[ + PostSchema | None, + RelationshipInfo( resource_type="post", ), - ) - author: "UserSchema" = Field( - relationship=RelationshipInfo( + ] = None + author: Annotated[ + UserSchema | None, + RelationshipInfo( resource_type="user", ), - ) + ] = None class PostCommentPatchSchema(PostCommentBaseSchema): diff --git a/examples/api_for_sqlalchemy/models/schemas/user.py b/examples/api_for_sqlalchemy/models/schemas/user.py index 423a4d3c..f0bf47df 100644 --- a/examples/api_for_sqlalchemy/models/schemas/user.py +++ b/examples/api_for_sqlalchemy/models/schemas/user.py @@ -1,13 +1,16 @@ -"""User schemas module.""" from __future__ import annotations from datetime import datetime -from typing import TYPE_CHECKING, List, Optional +from typing import ( + TYPE_CHECKING, + Annotated, +) from pydantic import ConfigDict -from examples.api_for_sqlalchemy.models.enums import UserStatusEnum -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo 
+# from examples.api_for_sqlalchemy.models.enums import UserStatusEnum +from fastapi_jsonapi.schema_base import BaseModel, Field +from fastapi_jsonapi.types_metadata import RelationshipInfo if TYPE_CHECKING: from .computer import ComputerSchema @@ -20,36 +23,34 @@ class UserBaseSchema(BaseModel): model_config = ConfigDict(from_attributes=True) - class Enum: - """User enums.""" - - status = UserStatusEnum - - first_name: Optional[str] = None - last_name: Optional[str] = None - age: Optional[int] = None - status: UserStatusEnum = Field(default=UserStatusEnum.active) + first_name: str | None = None + last_name: str | None = None + age: int | None = None + # status: UserStatusEnum = Field(default=UserStatusEnum.active) email: str | None = None - posts: Optional[List["PostSchema"]] = Field( - relationship=RelationshipInfo( + posts: Annotated[ + list[PostSchema] | None, + RelationshipInfo( resource_type="post", many=True, ), - ) + ] = None - bio: Optional["UserBioSchema"] = Field( - relationship=RelationshipInfo( + bio: Annotated[ + UserBioSchema | None, + RelationshipInfo( resource_type="user_bio", ), - ) + ] = None - computers: Optional[List["ComputerSchema"]] = Field( - relationship=RelationshipInfo( + computers: Annotated[ + list[ComputerSchema] | None, + RelationshipInfo( resource_type="computer", many=True, ), - ) + ] = None class UserPatchSchema(UserBaseSchema): diff --git a/examples/api_for_sqlalchemy/models/schemas/user_bio.py b/examples/api_for_sqlalchemy/models/schemas/user_bio.py index 42c71d3e..060acb76 100644 --- a/examples/api_for_sqlalchemy/models/schemas/user_bio.py +++ b/examples/api_for_sqlalchemy/models/schemas/user_bio.py @@ -1,11 +1,15 @@ -"""User Bio schemas module.""" +from __future__ import annotations from datetime import datetime -from typing import TYPE_CHECKING +from typing import ( + TYPE_CHECKING, + Annotated, +) from pydantic import ConfigDict -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo +from 
fastapi_jsonapi.schema_base import BaseModel, Field +from fastapi_jsonapi.types_metadata import RelationshipInfo if TYPE_CHECKING: from .user import UserSchema @@ -20,13 +24,12 @@ class UserBioBaseSchema(BaseModel): favourite_movies: str # keys_to_ids_list: Optional[Dict[str, List[int]]] = None - user: "UserSchema" = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user", - ), - }, - ) + user: Annotated[ + UserSchema | None, + RelationshipInfo( + resource_type="user", + ), + ] = None class UserBioPatchSchema(UserBioBaseSchema): diff --git a/examples/api_for_sqlalchemy/models/user.py b/examples/api_for_sqlalchemy/models/user.py index bd088bfe..3113fe71 100644 --- a/examples/api_for_sqlalchemy/models/user.py +++ b/examples/api_for_sqlalchemy/models/user.py @@ -1,28 +1,36 @@ """User model.""" + from __future__ import annotations -from sqlalchemy import Column, Integer, String -from sqlalchemy.orm import relationship +from typing import TYPE_CHECKING + +from sqlalchemy.orm import ( + Mapped, + relationship, +) from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.models.enums import UserStatusEnum -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin -from examples.api_for_sqlalchemy.utils.sqlalchemy.fields.enum import EnumColumn +from examples.api_for_sqlalchemy.utils.sqlalchemy.timestamps_mixin import TimestampsMixin + +if TYPE_CHECKING: + from .computer import Computer + from .post import Post + from .post_comment import PostComment + from .user_bio import UserBio -class User(Base, BaseModelMixin): +class User(Base, TimestampsMixin): __tablename__ = "users" - id = Column(Integer, primary_key=True, autoincrement=True) - first_name: str = Column(String, nullable=True) - last_name: str = Column(String, nullable=True) - age: int = Column(Integer, nullable=True) - status = Column(EnumColumn(UserStatusEnum), nullable=False, default=UserStatusEnum.active) - 
email: str | None = Column(String, nullable=True) - - posts = relationship("Post", back_populates="user", uselist=True) - bio = relationship("UserBio", back_populates="user", uselist=False) - comments = relationship("PostComment", back_populates="author", uselist=True) - computers = relationship("Computer", back_populates="user", uselist=True) + + first_name: Mapped[str | None] + last_name: Mapped[str | None] + age: Mapped[int | None] + email: Mapped[str | None] + + posts: Mapped[list[Post]] = relationship("Post", back_populates="user", uselist=True) + bio: Mapped[UserBio] = relationship("UserBio", back_populates="user", uselist=False) + comments: Mapped[list[PostComment]] = relationship("PostComment", back_populates="author", uselist=True) + computers: Mapped[list[Computer]] = relationship("Computer", back_populates="user", uselist=True) def __repr__(self): return ( @@ -32,6 +40,3 @@ def __repr__(self): f" last_name={self.last_name!r}" ")" ) - - class Enum: - Status = UserStatusEnum diff --git a/examples/api_for_sqlalchemy/models/user_bio.py b/examples/api_for_sqlalchemy/models/user_bio.py index 0d194a60..615dcb4f 100644 --- a/examples/api_for_sqlalchemy/models/user_bio.py +++ b/examples/api_for_sqlalchemy/models/user_bio.py @@ -4,10 +4,11 @@ from sqlalchemy.orm import relationship from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from examples.api_for_sqlalchemy.utils.sqlalchemy.timestamps_mixin import TimestampsMixin -class UserBio(Base, BaseModelMixin): + +class UserBio(Base, TimestampsMixin): __tablename__ = "user_bio" id = Column(Integer, primary_key=True, autoincrement=True) birth_city: str = Column(String, nullable=False, default="", server_default="") diff --git a/examples/api_for_sqlalchemy/urls.py b/examples/api_for_sqlalchemy/urls.py index 55793c01..05fce79a 100644 --- a/examples/api_for_sqlalchemy/urls.py +++ b/examples/api_for_sqlalchemy/urls.py 
@@ -2,8 +2,6 @@ from typing import ( Any, - Dict, - List, ) from fastapi import ( @@ -47,7 +45,7 @@ ) -def add_routes(app: FastAPI) -> List[Dict[str, Any]]: +def add_routes(app: FastAPI) -> list[dict[str, Any]]: tags = [ { "name": "User", diff --git a/examples/api_for_sqlalchemy/util.py b/examples/api_for_sqlalchemy/util.py new file mode 100644 index 00000000..09712d42 --- /dev/null +++ b/examples/api_for_sqlalchemy/util.py @@ -0,0 +1,30 @@ +from fastapi import FastAPI +from fastapi.openapi.docs import ( + get_redoc_html, + get_swagger_ui_html, + get_swagger_ui_oauth2_redirect_html, +) + + +def register_static_docs_routes(app: FastAPI): + @app.get("/docs", include_in_schema=False) + async def custom_swagger_ui_html(): + return get_swagger_ui_html( + openapi_url=app.openapi_url, + title=app.title + " - Swagger UI", + oauth2_redirect_url=app.swagger_ui_oauth2_redirect_url, + swagger_js_url="https://unpkg.com/swagger-ui-dist@5/swagger-ui-bundle.js", + swagger_css_url="https://unpkg.com/swagger-ui-dist@5/swagger-ui.css", + ) + + @app.get(app.swagger_ui_oauth2_redirect_url, include_in_schema=False) + async def swagger_ui_redirect(): + return get_swagger_ui_oauth2_redirect_html() + + @app.get("/redoc", include_in_schema=False) + async def redoc_html(): + return get_redoc_html( + openapi_url=app.openapi_url, + title=app.title + " - ReDoc", + redoc_js_url="https://unpkg.com/redoc@next/bundles/redoc.standalone.js", + ) diff --git a/examples/api_for_sqlalchemy/utils/sqlalchemy/base_model_mixin.py b/examples/api_for_sqlalchemy/utils/sqlalchemy/base_model_mixin.py deleted file mode 100644 index b49851fa..00000000 --- a/examples/api_for_sqlalchemy/utils/sqlalchemy/base_model_mixin.py +++ /dev/null @@ -1,82 +0,0 @@ -from datetime import datetime -from typing import Generic, List, TypeVar - -from sqlalchemy import ( - delete, - func, - inspect, - select, -) -from sqlalchemy.engine import Result -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.ext.declarative 
import declared_attr -from sqlalchemy.orm import Mapped, mapped_column - -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base - -TypeBase = TypeVar("TypeBase", bound="Base") -Model = TypeVar("Model", Base, Base) - - -class BaseModelMixin(Generic[Model]): - id: int - - @declared_attr - def created_at(cls) -> Mapped[datetime]: - """Дата создания записи""" - return mapped_column( - "created_at", - default=datetime.utcnow, - server_default=func.now(), - ) - - @declared_attr - def modified_at(cls) -> Mapped[datetime]: - """Дата изменения записи""" - return mapped_column( - "modified_at", - default=datetime.utcnow, - onupdate=datetime.utcnow, - server_onupdate=func.now(), - ) - - def __repr__(self) -> str: - return "<{}, pk: {}>".format( - self.__class__.__name__, - ", ".join(str(getattr(self, key.name)) for key in inspect(self.__class__).primary_key), - ) - - async def save(self, session: AsyncSession, commit: bool = True, flush: bool = False) -> "BaseModelMixin[Model]": - has_pk: bool = all(getattr(self, key.name) for key in inspect(self.__class__).primary_key) - if has_pk: - await session.merge(self) - else: - session.add(self) - if commit: - await session.commit() - elif flush: - await session.flush() - return self - - async def delete(self, session: AsyncSession, commit: bool = True) -> "BaseModelMixin[Model]": - await session.execute(delete(self)) - if commit: - await session.commit() - return self - - @classmethod - async def get_all(cls, session: AsyncSession) -> List[Model]: - result = await session.execute(select(Model)) - return result.scalars().all() - - @classmethod - async def get_by_id(cls, id_: int, session: AsyncSession) -> Model: - stmt = select(cls).where(cls.id == id_) - result: Result = await session.execute(stmt) - return result.scalar_one() - - @classmethod - async def get_or_none(cls, id_: int, session: AsyncSession) -> Model: - stmt = select(cls).where(cls.id == id_) - result: Result = await session.execute(stmt) - return 
result.scalar_one_or_none() diff --git a/examples/api_for_sqlalchemy/utils/sqlalchemy/fields/enum.py b/examples/api_for_sqlalchemy/utils/sqlalchemy/fields/enum.py deleted file mode 100644 index 1cfd8f37..00000000 --- a/examples/api_for_sqlalchemy/utils/sqlalchemy/fields/enum.py +++ /dev/null @@ -1,35 +0,0 @@ -from enum import Enum as EnumOriginal -from typing import Type, TypeVar, Union - -from sqlalchemy import types -from sqlalchemy.engine import Dialect - -from fastapi_jsonapi.data_layers.fields.mixins import MixinEnum - -TypeEnum = TypeVar("TypeEnum", bound=MixinEnum) - - -class EnumColumn(types.TypeDecorator): - """ - Обычный Enum из python сохраняет в БД значение, а не ключ, как делает Enum sqlalchemy - """ - - impl = types.Text - cache_ok = True - - def __init__(self, enum: Union[Type[EnumOriginal], Type[TypeEnum]], *args: list, **kwargs: dict): - if not issubclass(enum, EnumOriginal): - msg = f"{enum} is not a subtype of Enum" - raise TypeError(msg) - self.enum = enum - super().__init__(*args, **kwargs) - - def process_bind_param(self, value: Union[Type[EnumOriginal], Type[TypeEnum]], dialect: Dialect): - if isinstance(value, EnumOriginal) and isinstance(value.value, (str, int)): - return value.value - if isinstance(value, str): - return self.enum[value].value - return value - - def process_result_value(self, value: Union[str, int], dialect: Dialect): - return self.enum.value_to_enum(value) diff --git a/examples/api_for_sqlalchemy/utils/sqlalchemy/timestamps_mixin.py b/examples/api_for_sqlalchemy/utils/sqlalchemy/timestamps_mixin.py new file mode 100644 index 00000000..b91f190a --- /dev/null +++ b/examples/api_for_sqlalchemy/utils/sqlalchemy/timestamps_mixin.py @@ -0,0 +1,24 @@ +from datetime import UTC, datetime + +from sqlalchemy import func +from sqlalchemy.ext.declarative import declared_attr +from sqlalchemy.orm import Mapped, mapped_column + + +class TimestampsMixin: + @declared_attr + def created_at(cls) -> Mapped[datetime]: + return mapped_column( + 
"created_at", + default=datetime.utcnow, + server_default=func.now(), + ) + + @declared_attr + def modified_at(cls) -> Mapped[datetime]: + return mapped_column( + "modified_at", + default=datetime.now(UTC), + onupdate=datetime.now(UTC), + server_onupdate=func.now(), + ) diff --git a/examples/api_for_tortoise_orm/helpers/factories/meta_base.py b/examples/api_for_tortoise_orm/helpers/factories/meta_base.py index 40f51908..d8c3100a 100644 --- a/examples/api_for_tortoise_orm/helpers/factories/meta_base.py +++ b/examples/api_for_tortoise_orm/helpers/factories/meta_base.py @@ -1,12 +1,13 @@ """Base factory module.""" +from __future__ import annotations + from typing import ( Any, Callable, Dict, Generic, List, - Optional, Tuple, Type, TypeVar, @@ -15,7 +16,7 @@ from tortoise import models -from fastapi_jsonapi.data_layers.fields.enum import Enum +from enum import Enum from .exceptions import ( ExceptionAfterCommit, ExceptionBeforeCreate, @@ -53,7 +54,7 @@ class Meta(object): @classmethod async def _get_data( cls, - data: Optional[Dict[str, Any]] = None, + data: dict[str, Any] | None = None, mode: FactoryUseMode = FactoryUseMode.test, ) -> Dict: new_kwargs = dict() @@ -74,7 +75,7 @@ async def _get_data( async def create_batch( cls, count: int = 1, - data: Optional[Dict[str, Any]] = None, + data: dict[str, Any] | None = None, save: bool = True, mode: FactoryUseMode = FactoryUseMode.test, ) -> List[models.MODEL]: @@ -110,7 +111,7 @@ async def create_batch( @classmethod async def create( cls, - data: Optional[Dict[str, Any]] = None, + data: dict[str, Any] | None = None, header: Union[HeadersQueryStringManager, None] = None, save: bool = True, mode: FactoryUseMode = FactoryUseMode.test, diff --git a/examples/api_for_tortoise_orm/main.py b/examples/api_for_tortoise_orm/main.py index a87eb383..26668566 100644 --- a/examples/api_for_tortoise_orm/main.py +++ b/examples/api_for_tortoise_orm/main.py @@ -3,6 +3,7 @@ In module placed db initialization functions, app factory. 
""" + import sys from pathlib import Path diff --git a/examples/api_for_tortoise_orm/models/enums.py b/examples/api_for_tortoise_orm/models/enums.py index c37423fc..07dd994d 100644 --- a/examples/api_for_tortoise_orm/models/enums.py +++ b/examples/api_for_tortoise_orm/models/enums.py @@ -1,4 +1,4 @@ -from fastapi_jsonapi.data_layers.fields.enum import Enum +from enum import Enum class UserStatusEnum(str, Enum): diff --git a/examples/api_for_tortoise_orm/models/pydantic/__init__.py b/examples/api_for_tortoise_orm/models/pydantic/__init__.py index 3a2443b8..47e0b868 100644 --- a/examples/api_for_tortoise_orm/models/pydantic/__init__.py +++ b/examples/api_for_tortoise_orm/models/pydantic/__init__.py @@ -1,6 +1,5 @@ """W-mount schemas package.""" - from .user import ( UserPatchSchema, UserSchema, diff --git a/examples/api_for_tortoise_orm/models/tortoise/user.py b/examples/api_for_tortoise_orm/models/tortoise/user.py index 211f813c..7f17a6cb 100644 --- a/examples/api_for_tortoise_orm/models/tortoise/user.py +++ b/examples/api_for_tortoise_orm/models/tortoise/user.py @@ -1,6 +1,5 @@ """User model.""" - from tortoise import ( fields, models, diff --git a/examples/api_limited_methods.py b/examples/api_limited_methods.py index c90366cc..15ca2054 100644 --- a/examples/api_limited_methods.py +++ b/examples/api_limited_methods.py @@ -1,6 +1,6 @@ import sys from pathlib import Path -from typing import Any, ClassVar, Dict +from typing import Any, ClassVar import uvicorn from fastapi import APIRouter, Depends, FastAPI @@ -45,8 +45,7 @@ class UserSchema(UserAttributesBaseSchema): def async_session() -> async_sessionmaker: engine = create_async_engine(url=make_url(DB_URL)) - _async_session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) - return _async_session + return async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) class Connector: @@ -75,7 +74,7 @@ class SessionDependency(BaseModel): model_config = 
ConfigDict(arbitrary_types_allowed=True) -def session_dependency_handler(view: ViewBase, dto: SessionDependency) -> Dict[str, Any]: +def session_dependency_handler(view: ViewBase, dto: SessionDependency) -> dict[str, Any]: return { "session": dto.session, } @@ -151,6 +150,6 @@ def create_app() -> FastAPI: if __name__ == "__main__": uvicorn.run( app, - host="0.0.0.0", + host="0.0.0.0", # noqa: S104 port=8080, ) diff --git a/examples/api_minimal.py b/examples/api_minimal.py index 8b60d9e7..a5f8ca29 100644 --- a/examples/api_minimal.py +++ b/examples/api_minimal.py @@ -1,6 +1,6 @@ import sys from pathlib import Path -from typing import Any, ClassVar, Dict +from typing import Any, ClassVar import uvicorn from fastapi import APIRouter, Depends, FastAPI @@ -45,8 +45,7 @@ class UserSchema(UserAttributesBaseSchema): def async_session() -> async_sessionmaker: engine = create_async_engine(url=make_url(DB_URL)) - _async_session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) - return _async_session + return async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) class Connector: @@ -75,7 +74,7 @@ class SessionDependency(BaseModel): model_config = ConfigDict(arbitrary_types_allowed=True) -def session_dependency_handler(view: ViewBase, dto: SessionDependency) -> Dict[str, Any]: +def session_dependency_handler(view: ViewBase, dto: SessionDependency) -> dict[str, Any]: return { "session": dto.session, } @@ -146,6 +145,6 @@ def create_app() -> FastAPI: if __name__ == "__main__": uvicorn.run( app, - host="0.0.0.0", + host="0.0.0.0", # noqa: S104 port=8080, ) diff --git a/examples/custom_filter_example.py b/examples/custom_filter_example.py index ee8b88ee..1b563f26 100644 --- a/examples/custom_filter_example.py +++ b/examples/custom_filter_example.py @@ -1,39 +1,26 @@ -from typing import Any, Union +from typing import ( + Annotated, + Any, +) -from pydantic.fields import Field, FieldInfo -from sqlalchemy.orm import 
InstrumentedAttribute -from sqlalchemy.sql.elements import BinaryExpression, BooleanClauseList +from pydantic.fields import Field +from fastapi_jsonapi.contrib.sqla.filters import sql_filter_jsonb_contains from fastapi_jsonapi.schema_base import BaseModel -def jsonb_contains_sql_filter( - schema_field: FieldInfo, - model_column: InstrumentedAttribute, - value: dict[Any, Any], - operator: str, -) -> Union[BinaryExpression, BooleanClauseList]: - """ - Any SQLA (or Tortoise) magic here - - :param schema_field: - :param model_column: - :param value: any dict - :param operator: value 'jsonb_contains' - :return: one sqla filter expression - """ - return model_column.op("@>")(value) - - class PictureSchema(BaseModel): """ Now you can use `jsonb_contains` sql filter for this resource """ name: str - meta: dict[Any, Any] = Field( - default_factory=dict, - description="Any additional info in JSON format.", - example={"location": "Moscow", "spam": "eggs"}, - _jsonb_contains_sql_filter_=jsonb_contains_sql_filter, - ) + meta: Annotated[ + dict[str, Any], + sql_filter_jsonb_contains, + Field( + default_factory=dict, + description="Any additional info in JSON format.", + example={"location": "Moscow", "spam": "eggs"}, + ), + ] diff --git a/fastapi_jsonapi/VERSION b/fastapi_jsonapi/VERSION index 24ba9a38..56fea8a0 100644 --- a/fastapi_jsonapi/VERSION +++ b/fastapi_jsonapi/VERSION @@ -1 +1 @@ -2.7.0 +3.0.0 \ No newline at end of file diff --git a/fastapi_jsonapi/__init__.py b/fastapi_jsonapi/__init__.py index a9d73a18..ebb190a8 100644 --- a/fastapi_jsonapi/__init__.py +++ b/fastapi_jsonapi/__init__.py @@ -1,4 +1,5 @@ """JSON API utils package.""" + from pathlib import Path from fastapi import FastAPI diff --git a/fastapi_jsonapi/api.py b/fastapi_jsonapi/api.py index c79f07f4..5a8181bc 100644 --- a/fastapi_jsonapi/api.py +++ b/fastapi_jsonapi/api.py @@ -1,4 +1,6 @@ """JSON API router class.""" + +from collections.abc import Iterable from enum import Enum, auto from inspect import 
Parameter, Signature, signature from typing import ( @@ -6,14 +8,9 @@ Any, Callable, ClassVar, - Dict, - Iterable, - List, Literal, Optional, - Type, TypeVar, - Union, ) from fastapi import APIRouter, Body, Path, Query, Request, status @@ -35,7 +32,7 @@ from fastapi_jsonapi.views.list_view import ListViewBase from fastapi_jsonapi.views.view_base import ViewBase -JSON_API_RESPONSE_TYPE = Dict[Union[int, str], Dict[str, Any]] +JSON_API_RESPONSE_TYPE = dict[int | str, dict[str, Any]] JSONAPIObjectSchemaType = TypeVar("JSONAPIObjectSchemaType", bound=PydanticBaseModel) @@ -57,26 +54,26 @@ class RoutersJSONAPI: """ # xxx: store in app, not in routers! - all_jsonapi_routers: ClassVar[Dict[str, "RoutersJSONAPI"]] = {} + all_jsonapi_routers: ClassVar[dict[str, "RoutersJSONAPI"]] = {} Methods = ViewMethods DEFAULT_METHODS = tuple(str(method) for method in ViewMethods) def __init__( self, router: APIRouter, - path: Union[str, List[str]], - tags: List[str], - class_list: Type["ListViewBase"], - class_detail: Type["DetailViewBase"], - model: Type[TypeModel], - schema: Type[BaseModel], + path: str | list[str], + tags: Iterable[str], + class_list: type["ListViewBase"], + class_detail: type["DetailViewBase"], + model: type[TypeModel], + schema: type[BaseModel], resource_type: str, - schema_in_post: Optional[Type[BaseModel]] = None, - schema_in_patch: Optional[Type[BaseModel]] = None, - pagination_default_size: Optional[int] = 25, - pagination_default_number: Optional[int] = 1, - pagination_default_offset: Optional[int] = None, - pagination_default_limit: Optional[int] = None, + schema_in_post: type[BaseModel] | None = None, + schema_in_patch: type[BaseModel] | None = None, + pagination_default_size: int | None = 25, + pagination_default_number: int | None = 1, + pagination_default_offset: int | None = None, + pagination_default_limit: int | None = None, methods: Iterable[str] = (), ) -> None: """ @@ -103,17 +100,17 @@ def __init__( :param pagination_default_limit: `page[limit]` 
default swagger param. limit/offset pagination, used with `page[offset]` """ - self._router: APIRouter = router - self._path: Union[str, List[str]] = path - self._tags: List[str] = tags + self.router: APIRouter = router + self.path: str | Iterable[str] = path + self.tags: list[str] = list(tags) self.detail_views = None self.list_views = None - self.detail_view_resource: Type["DetailViewBase"] = class_detail - self.list_view_resource: Type["ListViewBase"] = class_list + self.detail_view_resource: type[DetailViewBase] = class_detail + self.list_view_resource: type[ListViewBase] = class_list self.type_: str = resource_type - self._schema: Type[BaseModel] = schema - self.schema_list: Type[BaseModel] = schema - self.model: Type[TypeModel] = model + self.schema: type[BaseModel] = schema + self.schema_list: type[BaseModel] = schema + self.model: type[TypeModel] = model self.schema_detail = schema # tuple and not set, so ordering is persisted self.methods = tuple(methods) or self.DEFAULT_METHODS @@ -123,10 +120,10 @@ def __init__( raise ValueError(msg) self.all_jsonapi_routers[self.type_] = self - self.pagination_default_size: Optional[int] = pagination_default_size - self.pagination_default_number: Optional[int] = pagination_default_number - self.pagination_default_offset: Optional[int] = pagination_default_offset - self.pagination_default_limit: Optional[int] = pagination_default_limit + self.pagination_default_size: int | None = pagination_default_size + self.pagination_default_number: int | None = pagination_default_number + self.pagination_default_offset: int | None = pagination_default_offset + self.pagination_default_limit: int | None = pagination_default_limit self.schema_builder = SchemaBuilder(resource_type=resource_type) dto = self.schema_builder.create_schemas( @@ -161,11 +158,11 @@ def _prepare_responses(self): } def _create_and_register_generic_views(self): - if isinstance(self._path, Iterable) and not isinstance(self._path, (str, bytes)): - for i_path in 
self._path: + if isinstance(self.path, Iterable) and not isinstance(self.path, (str, bytes)): + for i_path in self.path: self._register_views(i_path) else: - self._register_views(self._path) + self._register_views(self.path) def get_endpoint_name( self, @@ -185,9 +182,9 @@ def _register_get_resource_list(self, path: str): list_response_example = { status.HTTP_200_OK: {"model": self.list_response_schema}, } - self._router.add_api_route( + self.router.add_api_route( path=path, - tags=self._tags, + tags=self.tags, responses=list_response_example | self.default_error_responses, methods=["GET"], summary=f"Get list of `{self.type_}` objects", @@ -199,9 +196,9 @@ def _register_post_resource_list(self, path: str): create_resource_response_example = { status.HTTP_201_CREATED: {"model": self.detail_response_schema}, } - self._router.add_api_route( + self.router.add_api_route( path=path, - tags=self._tags, + tags=self.tags, responses=create_resource_response_example | self.default_error_responses, methods=["POST"], summary=f"Create object `{self.type_}`", @@ -214,9 +211,9 @@ def _register_delete_resource_list(self, path: str): detail_response_example = { status.HTTP_200_OK: {"model": self.detail_response_schema}, } - self._router.add_api_route( + self.router.add_api_route( path=path, - tags=self._tags, + tags=self.tags, responses=detail_response_example | self.default_error_responses, methods=["DELETE"], summary=f"Delete objects `{self.type_}` by filters", @@ -228,11 +225,11 @@ def _register_get_resource_detail(self, path: str): detail_response_example = { status.HTTP_200_OK: {"model": self.detail_response_schema}, } - self._router.add_api_route( + self.router.add_api_route( # TODO: variable path param name (set default name on DetailView class) # TODO: trailing slash (optional) path=path + "/{obj_id}", - tags=self._tags, + tags=self.tags, responses=detail_response_example | self.default_error_responses, methods=["GET"], summary=f"Get object `{self.type_}` by id", @@ -244,11 
+241,11 @@ def _register_patch_resource_detail(self, path: str): update_response_example = { status.HTTP_200_OK: {"model": self.detail_response_schema}, } - self._router.add_api_route( + self.router.add_api_route( # TODO: variable path param name (set default name on DetailView class) # TODO: trailing slash (optional) path=path + "/{obj_id}", - tags=self._tags, + tags=self.tags, responses=update_response_example | self.default_error_responses, methods=["PATCH"], summary=f"Patch object `{self.type_}` by id", @@ -263,11 +260,11 @@ def _register_delete_resource_detail(self, path: str): " the server MUST return a result with no data", }, } - self._router.add_api_route( + self.router.add_api_route( # TODO: variable path param name (set default name on DetailView class) # TODO: trailing slash (optional) path=path + "/{obj_id}", - tags=self._tags, + tags=self.tags, responses=delete_response_example | self.default_error_responses, methods=["DELETE"], summary=f"Delete object `{self.type_}` by id", @@ -276,31 +273,27 @@ def _register_delete_resource_detail(self, path: str): status_code=status.HTTP_204_NO_CONTENT, ) - def _create_pagination_query_params(self) -> List[Parameter]: + def _create_pagination_query_params(self) -> list[Parameter]: size = Query(self.pagination_default_size, alias="page[size]", title="pagination_page_size") number = Query(self.pagination_default_number, alias="page[number]", title="pagination_page_number") offset = Query(self.pagination_default_offset, alias="page[offset]", title="pagination_page_offset") limit = Query(self.pagination_default_limit, alias="page[limit]", title="pagination_page_limit") - params = [] - - for q_param in ( - size, - number, - offset, - limit, - ): - params.append( - Parameter( - # name doesn't really matter here - name=q_param.title, - kind=Parameter.POSITIONAL_OR_KEYWORD, - annotation=Optional[int], - default=q_param, - ), + return [ + Parameter( + # name doesn't really matter here + name=q_param.title, + 
kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=int | None, + default=q_param, ) - - return params + for q_param in ( + size, + number, + offset, + limit, + ) + ] @classmethod def _create_filters_query_dependency_param(cls): @@ -346,7 +339,7 @@ def _get_separated_params(cls, sig: Signature): params = [] tail_params = [] - for name, param in sig.parameters.items(): + for param in sig.parameters.values(): if param.kind is Parameter.VAR_KEYWORD: # skip **kwargs for spec continue @@ -389,8 +382,11 @@ def _update_signature_for_resource_detail_view( return sig.replace(parameters=params + include_params + list(additional_dependency_params) + tail_params) - @staticmethod - def _create_dependency_params_from_pydantic_model(model_class: Type[BaseModel]) -> List[Parameter]: + @classmethod + def _create_dependency_params_from_pydantic_model( + cls, + model_class: type[BaseModel], + ) -> list[Parameter]: return [ Parameter( name=field_name, @@ -401,8 +397,12 @@ def _create_dependency_params_from_pydantic_model(model_class: Type[BaseModel]) for field_name, field_info in model_class.model_fields.items() ] - @staticmethod - def _update_method_config(view: Type["ViewBase"], method: HTTPMethod) -> HTTPMethodConfig: + @classmethod + def _update_method_config( + cls, + view: type["ViewBase"], + method: HTTPMethod, + ) -> HTTPMethodConfig: target_config = view.method_dependencies.get(method) or HTTPMethodConfig() common_config = view.method_dependencies.get(HTTPMethod.ALL) or HTTPMethodConfig() @@ -427,17 +427,18 @@ def _update_method_config(view: Type["ViewBase"], method: HTTPMethod) -> HTTPMet return new_method_config + @classmethod def _update_method_config_and_get_dependency_params( - self, - view: Type["ViewBase"], + cls, + view: type["ViewBase"], method: HTTPMethod, - ) -> List[Parameter]: - method_config = self._update_method_config(view, method) + ) -> list[Parameter]: + method_config = cls._update_method_config(view, method) if method_config.dependencies is None: return [] 
- return self._create_dependency_params_from_pydantic_model(method_config.dependencies) + return cls._create_dependency_params_from_pydantic_model(method_config.dependencies) def prepare_dependencies_handler_signature( self, @@ -459,9 +460,9 @@ def prepare_dependencies_handler_signature( async def handle_view_dependencies( self, request: Request, - view_cls: Type["ViewBase"], + view_cls: type["ViewBase"], method: HTTPMethod, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """ Combines all dependencies (prepared) and returns them as list @@ -484,7 +485,7 @@ def handle_dependencies(**dep_kwargs): ) dep_helper = DependencyHelper(request=request) - dependencies_result: Dict[str, Any] = await dep_helper.run(handle_dependencies) + dependencies_result: dict[str, Any] = await dep_helper.run(handle_dependencies) return dependencies_result def _create_get_resource_list_view(self): @@ -500,8 +501,7 @@ async def wrapper(request: Request, **extra_view_deps): jsonapi=self, ) - response = await resource.handle_get_resource_list(**extra_view_deps) - return response + return await resource.handle_get_resource_list(**extra_view_deps) additional_dependency_params = self._update_method_config_and_get_dependency_params( self.list_view_resource, @@ -532,11 +532,10 @@ async def wrapper( jsonapi=self, ) - response = await resource.handle_post_resource_list( + return await resource.handle_post_resource_list( data_create=data, **extra_view_deps, ) - return response additional_dependency_params = self._update_method_config_and_get_dependency_params( self.list_view_resource, @@ -563,8 +562,7 @@ async def wrapper(request: Request, **extra_view_deps): jsonapi=self, ) - response = await resource.handle_delete_resource_list(**extra_view_deps) - return response + return await resource.handle_delete_resource_list(**extra_view_deps) additional_dependency_params = self._update_method_config_and_get_dependency_params( self.list_view_resource, @@ -594,8 +592,7 @@ async def wrapper(request: Request, 
obj_id: str = Path(...), **extra_view_deps): ) # TODO: pass obj_id as kwarg (get name from DetailView class) - response = await resource.handle_get_resource_detail(obj_id, **extra_view_deps) - return response + return await resource.handle_get_resource_detail(obj_id, **extra_view_deps) additional_dependency_params = self._update_method_config_and_get_dependency_params( self.detail_view_resource, @@ -629,12 +626,11 @@ async def wrapper( ) # TODO: pass obj_id as kwarg (get name from DetailView class) - response = await resource.handle_update_resource( + return await resource.handle_update_resource( obj_id=obj_id, data_update=data, **extra_view_deps, ) - return response additional_dependency_params = self._update_method_config_and_get_dependency_params( self.detail_view_resource, @@ -665,8 +661,7 @@ async def wrapper( ) # TODO: pass obj_id as kwarg (get name from DetailView class) - response = await resource.handle_delete_resource(obj_id=obj_id, **extra_view_deps) - return response + return await resource.handle_delete_resource(obj_id=obj_id, **extra_view_deps) additional_dependency_params = self._update_method_config_and_get_dependency_params( self.detail_view_resource, @@ -687,7 +682,7 @@ def _register_views(self, path: str): :param path: :return: """ - methods_map: Dict[Union[str, ViewMethods], Callable[[str], None]] = { + methods_map: dict[str | ViewMethods, Callable[[str], None]] = { ViewMethods.GET_LIST: self._register_get_resource_list, ViewMethods.POST: self._register_post_resource_list, ViewMethods.DELETE_LIST: self._register_delete_resource_list, diff --git a/fastapi_jsonapi/atomic/atomic.py b/fastapi_jsonapi/atomic/atomic.py index bcb6a8cc..676abf39 100644 --- a/fastapi_jsonapi/atomic/atomic.py +++ b/fastapi_jsonapi/atomic/atomic.py @@ -1,7 +1,4 @@ -from typing import ( - Optional, - Type, -) +from __future__ import annotations from fastapi import APIRouter, Request, Response, status @@ -13,12 +10,12 @@ class AtomicOperations: - atomic_handler: 
Type[AtomicViewHandler] = AtomicViewHandler + atomic_handler: type[AtomicViewHandler] = AtomicViewHandler def __init__( self, url_path: str = "/operations", - router: Optional[APIRouter] = None, + router: APIRouter | None = None, ): self.router = router or APIRouter(tags=["Atomic Operations"]) self.url_path = url_path diff --git a/fastapi_jsonapi/atomic/atomic_handler.py b/fastapi_jsonapi/atomic/atomic_handler.py index 8def486d..7d7f8d91 100644 --- a/fastapi_jsonapi/atomic/atomic_handler.py +++ b/fastapi_jsonapi/atomic/atomic_handler.py @@ -7,65 +7,80 @@ from typing import ( TYPE_CHECKING, Any, - Awaitable, Callable, - List, - Optional, - Type, + NoReturn, TypedDict, - Union, ) from fastapi import HTTPException, status +from pydantic import BaseModel as PydanticBaseModel from pydantic import ValidationError -from starlette.requests import Request from fastapi_jsonapi import RoutersJSONAPI from fastapi_jsonapi.atomic.prepared_atomic_operation import LocalIdsType, OperationBase -from fastapi_jsonapi.atomic.schemas import AtomicOperation, AtomicOperationRequest, AtomicResultResponse if TYPE_CHECKING: + from collections.abc import Awaitable + + from starlette.requests import Request + + from fastapi_jsonapi.atomic.schemas import ( + AtomicOperation, + AtomicOperationRequest, + AtomicResultResponse, + ) from fastapi_jsonapi.data_layers.base import BaseDataLayer + from fastapi_jsonapi.data_typing import TypeSchema log = logging.getLogger(__name__) -AtomicResponseDict = TypedDict("AtomicResponseDict", {"atomic:results": List[Any]}) +AtomicResponseDict = TypedDict("AtomicResponseDict", {"atomic:results": list[Any]}) current_atomic_operation: ContextVar[OperationBase] = ContextVar("current_atomic_operation") +OPERATION_VALIDATION_ERROR_TEXT = "Validation error on operation {operation!r}" + + +def handle_operation_exc( + operation: OperationBase, + ex: ValidationError | ValueError, +) -> NoReturn: + log.exception( + "Validation error on atomic action ref=%s, data=%s", + 
operation.ref, + operation.data, + ) + errors_details = { + "message": OPERATION_VALIDATION_ERROR_TEXT.format(operation=operation.op_type), + "ref": operation.ref, + "data": operation.data.model_dump(exclude_unset=True), + } + if isinstance(ex, ValidationError): + errors_details.update(errors=ex.errors()) + elif isinstance(ex, ValueError): + errors_details.update(error=str(ex)) + else: + raise ex + # TODO: json:api exception + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + detail=errors_details, + ) + + def catch_exc_on_operation_handle(func: Callable[..., Awaitable]): @wraps(func) async def wrapper(*a, operation: OperationBase, **kw): try: return await func(*a, operation=operation, **kw) except (ValidationError, ValueError) as ex: - log.exception( - "Validation error on atomic action ref=%s, data=%s", - operation.ref, - operation.data, - ) - errors_details = { - "message": f"Validation error on operation {operation.op_type}", - "ref": operation.ref, - "data": operation.data.model_dump(), - } - if isinstance(ex, ValidationError): - errors_details.update(errors=ex.errors()) - elif isinstance(ex, ValueError): - errors_details.update(error=str(ex)) - else: - raise - # TODO: json:api exception - raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail=errors_details, - ) + handle_operation_exc(operation, ex) return wrapper class AtomicViewHandler: - jsonapi_routers_cls: Type[RoutersJSONAPI] = RoutersJSONAPI + jsonapi_routers_cls: type[RoutersJSONAPI] = RoutersJSONAPI def __init__( self, @@ -90,17 +105,16 @@ async def prepare_one_operation(self, operation: AtomicOperation): raise ValueError(msg) jsonapi = self.jsonapi_routers_cls.all_jsonapi_routers[operation_type] - one_operation = OperationBase.prepare( + return OperationBase.prepare( action=operation.op, request=self.request, jsonapi=jsonapi, ref=operation.ref, data=operation.data, ) - return one_operation - async def prepare_operations(self) -> 
List[OperationBase]: - prepared_operations: List[OperationBase] = [] + async def prepare_operations(self) -> list[OperationBase]: + prepared_operations: list[OperationBase] = [] for operation in self.operations_request.operations: one_operation = await self.prepare_one_operation(operation) @@ -113,26 +127,39 @@ async def process_one_operation( self, dl: BaseDataLayer, operation: OperationBase, - ): + ) -> TypeSchema | None: operation.update_relationships_with_lid(local_ids=self.local_ids_cache) return await operation.handle(dl=dl) - async def handle(self) -> Union[AtomicResponseDict, AtomicResultResponse, None]: + async def process_next_operation( + self, + operation: OperationBase, + previous_dl: BaseDataLayer | None, + ) -> tuple[TypeSchema | None, BaseDataLayer]: + dl = await operation.get_data_layer() + await dl.atomic_start(previous_dl=previous_dl) + try: + response = await self.process_one_operation( + dl=dl, + operation=operation, + ) + except HTTPException as ex: + # gracefully end!! 
+ await dl.atomic_end(success=False, exception=ex) + raise + + return response, dl + + async def handle(self) -> AtomicResponseDict | AtomicResultResponse | None: prepared_operations = await self.prepare_operations() results = [] only_empty_responses = True success = True - previous_dl: Optional[BaseDataLayer] = None + previous_dl: BaseDataLayer | None = None for operation in prepared_operations: # set context var ctx_var_token = current_atomic_operation.set(operation) - - dl: BaseDataLayer = await operation.get_data_layer() - await dl.atomic_start(previous_dl=previous_dl) - response = await self.process_one_operation( - dl=dl, - operation=operation, - ) + response, dl = await self.process_next_operation(operation, previous_dl) previous_dl = dl # response.data.id @@ -143,7 +170,12 @@ async def handle(self) -> Union[AtomicResponseDict, AtomicResultResponse, None]: results.append({}) continue only_empty_responses = False - results.append({"data": response.data}) + response_data = response.data + # TODO: leave as is? Is there any chance we get not a Pydantic model? + # maybe type annotations + mypy will help here + if isinstance(response_data, PydanticBaseModel): + response_data = response_data.model_dump() + results.append({"data": response_data}) if operation.data.lid and response.data: self.local_ids_cache[operation.data.type][operation.data.lid] = response.data.id @@ -160,3 +192,4 @@ async def handle(self) -> Union[AtomicResponseDict, AtomicResultResponse, None]: if all results are empty, the server MAY respond with 204 No Content and no document. 
""" + return None diff --git a/fastapi_jsonapi/atomic/prepared_atomic_operation.py b/fastapi_jsonapi/atomic/prepared_atomic_operation.py index cbdf0cc2..4b906a28 100644 --- a/fastapi_jsonapi/atomic/prepared_atomic_operation.py +++ b/fastapi_jsonapi/atomic/prepared_atomic_operation.py @@ -1,28 +1,29 @@ from __future__ import annotations from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Dict, Optional, Type +from typing import TYPE_CHECKING, Any -from fastapi import Request - -from fastapi_jsonapi import RoutersJSONAPI from fastapi_jsonapi.atomic.schemas import AtomicOperationAction, AtomicOperationRef, OperationDataType from fastapi_jsonapi.views.utils import HTTPMethod if TYPE_CHECKING: + from fastapi import Request + + from fastapi_jsonapi import RoutersJSONAPI from fastapi_jsonapi.data_layers.base import BaseDataLayer + from fastapi_jsonapi.data_typing import TypeSchema from fastapi_jsonapi.views.detail_view import DetailViewBase from fastapi_jsonapi.views.list_view import ListViewBase from fastapi_jsonapi.views.view_base import ViewBase -LocalIdsType = Dict[str, Dict[str, str]] +LocalIdsType = dict[str, dict[str, str]] @dataclass class OperationBase: jsonapi: RoutersJSONAPI view: ViewBase - ref: Optional[AtomicOperationRef] + ref: AtomicOperationRef | None data: OperationDataType op_type: str @@ -36,10 +37,10 @@ def prepare( action: str, request: Request, jsonapi: RoutersJSONAPI, - ref: Optional[AtomicOperationRef], + ref: AtomicOperationRef | None, data: OperationDataType, - ) -> "OperationBase": - view_cls: Type[ViewBase] = jsonapi.detail_view_resource + ) -> OperationBase: + view_cls: type[ViewBase] = jsonapi.detail_view_resource if hasattr(action, "value"): # convert to str if enum @@ -67,14 +68,14 @@ def prepare( ) async def get_data_layer(self) -> BaseDataLayer: - data_layer_view_dependencies: Dict[str, Any] = await self.jsonapi.handle_view_dependencies( + data_layer_view_dependencies: dict[str, Any] = await 
self.jsonapi.handle_view_dependencies( request=self.view.request, view_cls=self.view.__class__, method=self.http_method, ) return await self.view.get_data_layer(data_layer_view_dependencies) - async def handle(self, dl: BaseDataLayer): + async def handle(self, dl: BaseDataLayer) -> TypeSchema | None: raise NotImplementedError @classmethod @@ -87,7 +88,7 @@ def upd_one_relationship_with_local_id(cls, relationship_info: dict, local_ids: :return: """ missing = object() - lid = relationship_info.get("lid", missing) + lid: str | type[missing] = relationship_info.get("lid", missing) if lid is missing: return @@ -113,7 +114,7 @@ def upd_one_relationship_with_local_id(cls, relationship_info: dict, local_ids: def update_relationships_with_lid(self, local_ids: LocalIdsType): if not (self.data and self.data.relationships): return - for relationship_name, relationship_value in self.data.relationships.items(): + for relationship_value in self.data.relationships.values(): relationship_data = relationship_value["data"] if isinstance(relationship_data, list): for data in relationship_data: @@ -122,7 +123,7 @@ def update_relationships_with_lid(self, local_ids: LocalIdsType): self.upd_one_relationship_with_local_id(relationship_data, local_ids=local_ids) else: msg = "unexpected relationship data" - raise ValueError(msg) + raise ValueError(msg) # noqa: TRY004 class ListOperationBase(OperationBase): @@ -136,22 +137,21 @@ class DetailOperationBase(OperationBase): class OperationAdd(ListOperationBase): http_method = HTTPMethod.POST - async def handle(self, dl: BaseDataLayer): + async def handle(self, dl: BaseDataLayer) -> TypeSchema: # use outer schema wrapper because we need this error path: # `{'loc': ['data', 'attributes', 'name']` # and not `{'loc': ['attributes', 'name']` - data_in = self.jsonapi.schema_in_post(data=self.data) - response = await self.view.process_create_object( + data_in = self.jsonapi.schema_in_post(data=self.data.model_dump(exclude_unset=True)) + return await 
self.view.process_create_object( dl=dl, data_create=data_in.data, ) - return response class OperationUpdate(DetailOperationBase): http_method = HTTPMethod.PATCH - async def handle(self, dl: BaseDataLayer): + async def handle(self, dl: BaseDataLayer) -> TypeSchema: if self.data is None: # TODO: clear to-one relationships pass @@ -160,14 +160,13 @@ async def handle(self, dl: BaseDataLayer): # use outer schema wrapper because we need this error path: # `{'loc': ['data', 'attributes', 'name']` # and not `{'loc': ['attributes', 'name']` - data_in = self.jsonapi.schema_in_patch(data=self.data) + data_in = self.jsonapi.schema_in_patch(data=self.data.model_dump(exclude_unset=True)) obj_id = self.ref and self.ref.id or self.data and self.data.id - response = await self.view.process_update_object( + return await self.view.process_update_object( dl=dl, obj_id=obj_id, data_update=data_in.data, ) - return response class OperationRemove(DetailOperationBase): diff --git a/fastapi_jsonapi/atomic/schemas.py b/fastapi_jsonapi/atomic/schemas.py index ffbf0699..edccb106 100644 --- a/fastapi_jsonapi/atomic/schemas.py +++ b/fastapi_jsonapi/atomic/schemas.py @@ -1,10 +1,12 @@ from __future__ import annotations from enum import Enum -from typing import Any, List, Optional, Union +from typing import Any, Union from pydantic import BaseModel, Field, model_validator +from fastapi_jsonapi.utils import logical_xor + class OperationRelationshipSchema(BaseModel): id: str = Field(default=..., description="Related object ID") @@ -17,10 +19,10 @@ class OperationItemInSchema(BaseModel): """ type: str = Field(default=..., description="Resource type") - id: Optional[str] = Field(default=None, description="Resource object ID") - lid: Optional[str] = Field(default=None, description="Resource object local ID") - attributes: Optional[dict] = Field(None, description="Resource object attributes") - relationships: Optional[dict] = Field(None, description="Resource object relationships") + id: str | None = 
Field(default=None, description="Resource object ID") + lid: str | None = Field(default=None, description="Resource object local ID") + attributes: dict | None = Field(None, description="Resource object attributes") + relationships: dict | None = Field(None, description="Resource object relationships") OperationDataType = Union[ @@ -28,7 +30,7 @@ class OperationItemInSchema(BaseModel): # any object creation OperationItemInSchema, # to-many relationship - List[OperationRelationshipSchema], + list[OperationRelationshipSchema], # to-one relationship OperationRelationshipSchema, # not required @@ -50,9 +52,9 @@ class AtomicOperationRef(BaseModel): """ type: str = Field(default=...) - id: Optional[str] = Field(default=None) - lid: Optional[str] = Field(default=None) - relationship: Optional[str] = Field(default=None) + id: str | None = Field(default=None) + lid: str | None = Field(default=None) + relationship: str | None = Field(default=None) @model_validator(mode="before") def validate_atomic_operation_ref(cls, values: dict): @@ -62,11 +64,7 @@ def validate_atomic_operation_ref(cls, values: dict): :param values: :return: """ - if ( - # XOR - bool(values.get("lid")) - != bool(values.get("id")) - ): + if logical_xor(values.get("lid"), values.get("id")): # if one of id/lid is present, ref is ok return values @@ -105,8 +103,8 @@ class AtomicOperation(BaseModel): default=..., description="an operation code, expressed as a string, that indicates the type of operation to perform.", ) - ref: Optional[AtomicOperationRef] = Field(default=None) - href: Optional[str] = Field( + ref: AtomicOperationRef | None = Field(default=None) + href: str | None = Field( default=None, description="a string that contains a URI-reference that identifies the target of the operation.", ) @@ -116,7 +114,7 @@ class AtomicOperation(BaseModel): description="the operation’s “primary data”.", ) - meta: Optional[dict] = Field( + meta: dict | None = Field( default=None, description="a meta object that 
contains non-standard meta-information about the operation", ) @@ -168,7 +166,7 @@ def validate_operation(cls, values: dict): """ cls._validate_one_of_ref_or_href(values=values) op = values.get("op") - ref: Optional[AtomicOperationRef] = values.get("ref") + ref: AtomicOperationRef | None = values.get("ref") if op == AtomicOperationAction.remove: if not ref: msg = f"ref should be present for action {op!r}" @@ -182,7 +180,13 @@ def validate_operation(cls, values: dict): raise ValueError(msg) data: OperationDataType = values.get("data") - operation_type = cls._get_value_from_dict_or_obj(ref, "type") or cls._get_value_from_dict_or_obj(data, "type") + operation_type = ( + # take from ref + cls._get_value_from_dict_or_obj(ref, "type") + or + # or take from data + cls._get_value_from_dict_or_obj(data, "type") + ) if not operation_type: msg = "Operation has to be in ref or in data" raise ValueError(msg) @@ -191,18 +195,18 @@ def validate_operation(cls, values: dict): class AtomicOperationRequest(BaseModel): - operations: List[AtomicOperation] = Field( + operations: list[AtomicOperation] = Field( alias="atomic:operations", min_length=1, ) class AtomicResult(BaseModel): - data: Optional[dict] = Field( + data: dict | None = Field( default=None, description="the “primary data” resulting from the operation.", ) - meta: Optional[dict] = Field( + meta: dict | None = Field( default=None, description="a meta object that contains non-standard meta-information about the result.", ) @@ -213,7 +217,7 @@ class AtomicResultResponse(BaseModel): https://jsonapi.org/ext/atomic/#auto-id-responses-4 """ - results: List[AtomicResult] = Field( + results: list[AtomicResult] = Field( alias="atomic:results", min_length=1, ) diff --git a/fastapi_jsonapi/common.py b/fastapi_jsonapi/common.py new file mode 100644 index 00000000..61b86627 --- /dev/null +++ b/fastapi_jsonapi/common.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from 
fastapi_jsonapi.types_metadata import ( + ClientCanSetId, + CustomFilterSQL, + RelationshipInfo, +) +from fastapi_jsonapi.utils.metadata_instance_search import MetadataInstanceSearch + +if TYPE_CHECKING: + # noinspection PyProtectedMember + from pydantic.fields import FieldInfo + +search_client_can_set_id = MetadataInstanceSearch[ClientCanSetId](ClientCanSetId) +search_relationship_info = MetadataInstanceSearch[RelationshipInfo](RelationshipInfo) +search_custom_filter_sql = MetadataInstanceSearch[CustomFilterSQL](CustomFilterSQL) + + +def get_relationship_info_from_field_metadata( + field: FieldInfo, +) -> RelationshipInfo | None: + return search_relationship_info.first(field) diff --git a/examples/api_for_sqlalchemy/utils/sqlalchemy/fields/__init__.py b/fastapi_jsonapi/contrib/__init__.py similarity index 100% rename from examples/api_for_sqlalchemy/utils/sqlalchemy/fields/__init__.py rename to fastapi_jsonapi/contrib/__init__.py diff --git a/fastapi_jsonapi/contrib/sqla/__init__.py b/fastapi_jsonapi/contrib/sqla/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/fastapi_jsonapi/contrib/sqla/filters.py b/fastapi_jsonapi/contrib/sqla/filters.py new file mode 100644 index 00000000..ae4ee85a --- /dev/null +++ b/fastapi_jsonapi/contrib/sqla/filters.py @@ -0,0 +1,53 @@ +from typing import ( + TYPE_CHECKING, + cast, +) + +from sqlalchemy import ( + BinaryExpression, + BooleanClauseList, + func, +) +from sqlalchemy.orm import InstrumentedAttribute + +from fastapi_jsonapi.types_metadata import CustomFilterSQL + +if TYPE_CHECKING: + from pydantic.fields import FieldInfo + + +SQLAExpressionType = BinaryExpression | BooleanClauseList + + +class CustomFilterSQLA(CustomFilterSQL[InstrumentedAttribute, SQLAExpressionType]): + """Base class for custom SQLAlchemy filters""" + + +class LowerEqualsFilterSQL(CustomFilterSQLA): + def get_expression( + self, + schema_field: "FieldInfo", + model_column: InstrumentedAttribute, + value: str, + operator: str, + ) -> 
BinaryExpression: + return cast( + BinaryExpression, + func.lower(model_column) == func.lower(value), + ) + + +# TODO: tests coverage +class JSONBContainsFilterSQL(CustomFilterSQLA): + def get_expression( + self, + schema_field: "FieldInfo", + model_column: InstrumentedAttribute, + value: str, + operator: str, + ) -> BinaryExpression: + return model_column.op("@>")(value) + + +sql_filter_lower_equals = LowerEqualsFilterSQL(op="lower_equals") +sql_filter_jsonb_contains = JSONBContainsFilterSQL(op="jsonb_contains") diff --git a/fastapi_jsonapi/data_layers/base.py b/fastapi_jsonapi/data_layers/base.py index 14e9b180..2c835c43 100644 --- a/fastapi_jsonapi/data_layers/base.py +++ b/fastapi_jsonapi/data_layers/base.py @@ -5,14 +5,15 @@ you must inherit from this base class """ -from typing import Dict, List, Optional, Tuple, Type +from __future__ import annotations from fastapi import Request +from pydantic import TypeAdapter +from fastapi_jsonapi.common import search_client_can_set_id from fastapi_jsonapi.data_typing import TypeModel, TypeSchema from fastapi_jsonapi.querystring import QueryStringManager from fastapi_jsonapi.schema import BaseJSONAPIItemInSchema -from fastapi_jsonapi.schema_builder import FieldConfig, TransferSaveWrapper class BaseDataLayer: @@ -21,10 +22,11 @@ class BaseDataLayer: def __init__( self, request: Request, - schema: Type[TypeSchema], - model: Type[TypeModel], + schema: type[TypeSchema], + model: type[TypeModel], + *, url_id_field: str, - id_name_field: Optional[str] = None, + id_name_field: str | None = None, disable_collection_count: bool = False, default_collection_count: int = -1, type_: str = "", @@ -53,20 +55,20 @@ def __init__( self.is_atomic = False self.type_ = type_ - async def atomic_start(self, previous_dl: Optional["BaseDataLayer"] = None): + async def atomic_start( + self, + previous_dl: BaseDataLayer | None = None, + ) -> None: self.is_atomic = True - async def atomic_end(self, success: bool = True): + async def atomic_end( + 
self, + *, + success: bool = True, + exception: Exception | None = None, + ) -> None: raise NotImplementedError - def _unwrap_field_config(self, extra: Dict): - field_config_wrapper: Optional[TransferSaveWrapper] = extra.get("field_config") - - if field_config_wrapper: - return field_config_wrapper.get_field_config() - - return FieldConfig() - def _apply_client_generated_id( self, data_create: BaseJSONAPIItemInSchema, @@ -80,13 +82,16 @@ def _apply_client_generated_id( """ if data_create.id is None: return model_kwargs - extra = data_create.model_fields["id"].json_schema_extra - if extra is not None and extra.get("client_can_set_id"): + + # TODO: annotation? + field = data_create.model_fields["id"] + if can_set_id := search_client_can_set_id.first(field): id_value = data_create.id - field_config = self._unwrap_field_config(extra) - if field_config.cast_type: - id_value = field_config.cast_type(id_value) + if can_set_id.cast_type: + # TODO: use type adapter only on builtin types? + t = TypeAdapter(can_set_id.cast_type) + id_value = t.validate_python(id_value) model_kwargs["id"] = id_value @@ -117,12 +122,12 @@ def get_object_id_field(self): except AttributeError: msg = f"{self.model.__name__} has no attribute {id_name_field}" # TODO: any custom exception type? 
- raise Exception(msg) + raise Exception(msg) # noqa: TRY002 def get_object_id(self, obj: TypeModel): return getattr(obj, self.get_object_id_field_name()) - async def get_object(self, view_kwargs: dict, qs: Optional[QueryStringManager] = None) -> TypeModel: + async def get_object(self, view_kwargs: dict, qs: QueryStringManager | None = None) -> TypeModel: """ Retrieve an object @@ -132,7 +137,7 @@ async def get_object(self, view_kwargs: dict, qs: Optional[QueryStringManager] = """ raise NotImplementedError - async def get_collection(self, qs: QueryStringManager, view_kwargs: Optional[dict] = None) -> Tuple[int, list]: + async def get_collection(self, qs: QueryStringManager, view_kwargs: dict | None = None) -> tuple[int, list]: """ Retrieve a collection of objects @@ -236,7 +241,7 @@ async def delete_relationship( def get_related_model_query_base( self, - related_model: Type[TypeModel], + related_model: type[TypeModel], ): """ Prepare query for the related model @@ -248,7 +253,7 @@ def get_related_model_query_base( def get_related_object_query( self, - related_model: Type[TypeModel], + related_model: type[TypeModel], related_id_field: str, id_value: str, ): @@ -264,7 +269,7 @@ def get_related_object_query( def get_related_objects_list_query( self, - related_model: Type[TypeModel], + related_model: type[TypeModel], related_id_field: str, ids: list[str], ): @@ -281,7 +286,7 @@ def get_related_objects_list_query( # async def get_related_object_query(self): async def get_related_object( self, - related_model: Type[TypeModel], + related_model: type[TypeModel], related_id_field: str, id_value: str, ) -> TypeModel: @@ -297,7 +302,7 @@ async def get_related_object( async def get_related_objects_list( self, - related_model: Type[TypeModel], + related_model: type[TypeModel], related_id_field: str, ids: list[str], ) -> list[TypeModel]: @@ -412,27 +417,25 @@ async def after_delete_object(self, obj: TypeModel, view_kwargs): """ raise NotImplementedError - async def 
delete_objects(self, objects: List[TypeModel], view_kwargs): + async def delete_objects(self, objects: list[TypeModel], view_kwargs): # TODO: doc raise NotImplementedError - async def before_delete_objects(self, objects: List[TypeModel], view_kwargs: dict): + async def before_delete_objects(self, objects: list[TypeModel], view_kwargs: dict): """ Make checks before deleting objects. :param objects: an object from data layer. :param view_kwargs: kwargs from the resource view. """ - pass - async def after_delete_objects(self, objects: List[TypeModel], view_kwargs: dict): + async def after_delete_objects(self, objects: list[TypeModel], view_kwargs: dict): """ Any action after deleting objects. :param objects: an object from data layer. :param view_kwargs: kwargs from the resource view. """ - pass async def before_create_relationship( self, diff --git a/fastapi_jsonapi/data_layers/fields/__init__.py b/fastapi_jsonapi/data_layers/fields/__init__.py deleted file mode 100644 index 64a97161..00000000 --- a/fastapi_jsonapi/data_layers/fields/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Fields package.""" diff --git a/fastapi_jsonapi/data_layers/fields/enum.py b/fastapi_jsonapi/data_layers/fields/enum.py deleted file mode 100644 index b7c0b9de..00000000 --- a/fastapi_jsonapi/data_layers/fields/enum.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Base enum module.""" - -from fastapi_jsonapi.data_layers.fields.mixins import ( - MixinEnum, - MixinIntEnum, -) - - -class Enum(MixinEnum): - """ - Base enum class. - - All used non-integer enumerations must inherit from this class. - """ - - pass - - -class IntEnum(MixinIntEnum): - """ - Base IntEnum class. - - All used integer enumerations must inherit from this class. 
- """ - - pass diff --git a/fastapi_jsonapi/data_layers/fields/mixins.py b/fastapi_jsonapi/data_layers/fields/mixins.py deleted file mode 100644 index 22a14530..00000000 --- a/fastapi_jsonapi/data_layers/fields/mixins.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Enum mixin module.""" - -from enum import ( - Enum, - IntEnum, -) - - -class MixinEnum(Enum): - """Extension over enum class from standard library.""" - - @classmethod - def names(cls): - """Get all field names.""" - return ",".join(field.name for field in cls) - - @classmethod - def values(cls): - """Get all values from Enum.""" - return [value for _, value in cls._member_map_.items()] - - @classmethod - def keys(cls): - """Get all field keys from Enum.""" - return [key for key, _ in cls._member_map_.items()] - - @classmethod - def inverse(cls): - """Return all inverted items sequence.""" - return {value: key for key, value in cls._member_map_.items()} - - @classmethod - def value_to_enum(cls, value): - """Convert value to enum.""" - val_to_enum = {value.value: value for _, value in cls._member_map_.items()} - return val_to_enum.get(value) - - -class MixinIntEnum(IntEnum): - """ - Здесь пришлось дублировать код, чтобы обеспечить совместимость с FastAPI и Pydantic. - - Основная проблема - данные либы определяют валидаторы для стандартной библиотеки enum, используя вызов issubclass. - И для стандартного IntEnum есть отдельная ветка issubclass(IntEnum), в которой происходят - специальные преобразования, например, аргументы из запроса конвертируются в тип int. - Поэтому OurEnum(int, Enum) не срабатывает по условию issubclass(obj, IntEnum) и выбираются - неверные валидаторы и конверторы. 
- А код ниже пришлось задублировать, так как у стандартного Enum есть метакласс, который разрешает только - такую цепочку наследования: - NewEnum(клас_тип, миксин_без_типа_1, ..., миксин_без_типа_n, Enum) - По этому правилу нельзя построить наследование, добавляющее миксин без типа к стандартному IntEnum: - NewEnum(our_mixin, IntEnum), так как IntEnum = (int, Enum) - Поэтому пока остается такое решение до каких-либо исправлений со стороны разработчиков либы, - либо появления более гениальных идей - """ - - @classmethod - def names(cls): - """Get all field names.""" - return ",".join(field.name for field in cls) - - @classmethod - def values(cls): - """Get all values from Enum.""" - return [value for _, value in cls._member_map_.items()] - - @classmethod - def keys(cls): - """Get all field keys from Enum.""" - return [key for key, _ in cls._member_map_.items()] - - @classmethod - def inverse(cls): - """Return all inverted items sequence.""" - return {value: key for key, value in cls._member_map_.items()} - - @classmethod - def value_to_enum(cls, value): - """Convert value to enum.""" - val_to_enum = {value.value: value for _, value in cls._member_map_.items()} - return val_to_enum.get(value) diff --git a/fastapi_jsonapi/data_layers/filtering/sqlalchemy.py b/fastapi_jsonapi/data_layers/filtering/sqlalchemy.py index af16129b..617eb530 100644 --- a/fastapi_jsonapi/data_layers/filtering/sqlalchemy.py +++ b/fastapi_jsonapi/data_layers/filtering/sqlalchemy.py @@ -1,20 +1,20 @@ """Helper to create sqlalchemy filters according to filter querystring parameter""" -import inspect + +from __future__ import annotations + import logging -from collections.abc import Sequence from typing import ( Any, Callable, - Dict, - List, - Optional, - Set, - Tuple, - Type, - Union, ) -from pydantic import BaseConfig, BaseModel, ConfigDict +from pydantic import ( + BaseConfig, + BaseModel, + ConfigDict, + PydanticSchemaGenerationError, + TypeAdapter, +) from pydantic.fields import FieldInfo 
from sqlalchemy import and_, false, not_, or_ from sqlalchemy.orm import aliased @@ -22,17 +22,25 @@ from sqlalchemy.orm.util import AliasedClass from sqlalchemy.sql.elements import BinaryExpression, BooleanClauseList +from fastapi_jsonapi.common import search_custom_filter_sql from fastapi_jsonapi.data_typing import TypeModel, TypeSchema from fastapi_jsonapi.exceptions import InvalidFilters, InvalidType from fastapi_jsonapi.exceptions.json_api import HTTPException -from fastapi_jsonapi.schema import JSONAPISchemaIntrospectionError, get_model_field, get_relationships +from fastapi_jsonapi.schema import ( + JSONAPISchemaIntrospectionError, + get_model_field, + get_relationship_fields_names, + get_schema_from_field_annotation, +) +from fastapi_jsonapi.types_metadata import CustomFilterSQL +from fastapi_jsonapi.utils import check_can_be_none log = logging.getLogger(__name__) RELATIONSHIP_SPLITTER = "." # The mapping with validators using by to cast raw value to instance of target type -REGISTERED_PYDANTIC_TYPES: Dict[Type, List[Callable]] = {} +REGISTERED_PYDANTIC_TYPES: dict[type, list[Callable]] = {} cast_failed = object() @@ -40,21 +48,14 @@ class RelationshipFilteringInfo(BaseModel): - target_schema: Type[TypeSchema] - model: Type[TypeModel] + target_schema: type[TypeSchema] + model: type[TypeModel] aliased_model: AliasedClass join_column: InstrumentedAttribute model_config = ConfigDict(arbitrary_types_allowed=True) -def check_can_be_none(fields: list[FieldInfo]) -> bool: - """ - Return True if None is possible value for target field - """ - return any(field_item.exclude for field_item in fields) - - -def separate_types(types: List[Type]) -> Tuple[List[Type], List[Type]]: +def separate_types(types: list[type]) -> tuple[list[type], list[type]]: """ Separates the types into two kinds. 
@@ -78,30 +79,11 @@ def separate_types(types: List[Type]) -> Tuple[List[Type], List[Type]]: return pydantic_types, userspace_types -def validator_requires_model_field(validator: Callable) -> bool: - """ - Check if validator accepts the `field` param - - :param validator: - :return: - """ - signature = inspect.signature(validator) - parameters = signature.parameters - - if "field" not in parameters: - return False - - field_param = parameters["field"] - field_type = field_param.annotation - - return field_type == "ModelField" or field_type is FieldInfo - - def cast_value_with_pydantic( - types: List[Type], + types: list[type], value: Any, schema_field: FieldInfo, -) -> Tuple[Optional[Any], List[str]]: +) -> tuple[Any | None, list[str]]: result_value, errors = None, [] for type_to_cast in types: @@ -109,7 +91,8 @@ def cast_value_with_pydantic( # Создаем экземпляр схемы для валидации значения model_instance = type_to_cast(**{schema_field.title: value}, __config__=BaseConfig) result_value = model_instance.model_dump()[schema_field.title] - except Exception as ex: + # TODO: handle specific exception + except Exception as ex: # noqa: BLE001, PERF203 errors.append(str(ex)) else: return result_value, errors @@ -118,15 +101,15 @@ def cast_value_with_pydantic( def cast_iterable_with_pydantic( - types: List[Type], - values: List, + types: list[type], + values: list, schema_field: FieldInfo, -) -> Tuple[List, List[str]]: +) -> tuple[list, list[str]]: type_cast_failed = False failed_values = [] - result_values: List[Any] = [] - errors: List[str] = [] + result_values: list[Any] = [] + errors: list[str] = [] for value in values: casted_value, cast_errors = cast_value_with_pydantic( @@ -151,16 +134,25 @@ def cast_iterable_with_pydantic( return result_values, errors -def cast_value_with_scheme(field_types: List[Type], value: Any) -> Tuple[Any, List[str]]: - errors: List[str] = [] +def get_type_cast(field_type: type) -> Callable[..., Any]: + try: + # don't allow arbitrary types, we 
don't know their behaviour + return TypeAdapter(field_type).validate_python + except PydanticSchemaGenerationError: + return field_type + + +def cast_value_with_scheme(field_types: list[type], value: Any) -> tuple[Any, list[str]]: + errors: list[str] = [] casted_value = cast_failed for field_type in field_types: + cast_type = get_type_cast(field_type) try: if isinstance(value, list): # noqa: SIM108 - casted_value = [field_type(item) for item in value] + casted_value = [cast_type(item) for item in value] else: - casted_value = field_type(value) + casted_value = cast_type(value) except (TypeError, ValueError) as ex: errors.append(str(ex)) else: @@ -170,6 +162,7 @@ def cast_value_with_scheme(field_types: List[Type], value: Any) -> Tuple[Any, Li def build_filter_expression( + field_name: str, schema_field: FieldInfo, model_column: InstrumentedAttribute, operator: str, @@ -182,6 +175,7 @@ def build_filter_expression( To implement a new filtering logic (override existing or create a new one) create a method inside a field following this pattern: `__sql_filter_` + :param field_name: :param schema_field: schemas field instance :param model_column: sqlalchemy column instance :param operator: your operator, for example: "eq", "in", "ilike_str_array", ... @@ -190,21 +184,18 @@ def build_filter_expression( """ fields = [schema_field] - # for Union annotations - if hasattr(schema_field, "fields") and schema_field.fields: - fields.extend(schema_field.fields) - can_be_none = check_can_be_none(fields) if value is None: if can_be_none: return getattr(model_column, operator)(value) - raise InvalidFilters(detail=f"The field `{schema_field.title}` can't be null") + raise InvalidFilters(detail=f"The field `{field_name}` can't be null") + # TODO: type adapter for the whole annotation? 
types = [i.annotation for i in fields] casted_value = None - errors: List[str] = [] + errors: list[str] = [] pydantic_types, userspace_types = separate_types(types) @@ -215,8 +206,6 @@ def build_filter_expression( casted_value, errors = func(pydantic_types, value, schema_field) if casted_value is None and userspace_types: - log.warning("Filtering by user type values is not properly tested yet. Use this on your own risk.") - casted_value, errors = cast_value_with_scheme(types, value) if casted_value is cast_failed: @@ -278,8 +267,8 @@ def gather_relationship_paths(filter_item: Union[dict, list]) -> Set[str]: def get_model_column( - model: Type[TypeModel], - schema: Type[TypeSchema], + model: type[TypeModel], + schema: type[TypeSchema], field_name: str, ) -> InstrumentedAttribute: try: @@ -290,7 +279,7 @@ def get_model_column( try: return getattr(model, model_field) except AttributeError: - msg = "{} has no attribute {}".format(model.__name__, model_field) + msg = f"{model.__name__} has no attribute {model_field}" raise InvalidFilters(msg) @@ -310,35 +299,45 @@ def get_operator(model_column: InstrumentedAttribute, operator_name: str) -> str if hasattr(model_column, op): return op - msg = "{} has no operator {}".format(model_column.key, operator_name) + msg = f"Field {model_column.key!r} has no operator {operator_name!r}" raise InvalidFilters(msg) -def get_custom_filter_expression_callable(schema_field, operator: str) -> Callable: - return schema_field.json_schema_extra.get( - f"_{operator}_sql_filter_", - ) +def get_custom_filter_expression_callable(schema_field, operator: str) -> CustomFilterSQL | None: + for filter_sql in search_custom_filter_sql.iterate(field=schema_field): + if filter_sql.op == operator: + return filter_sql + return None + + +def validate_relationship_name( + schema: type[TypeSchema], + target_relationship_name: str, +) -> None: + relationships_names = get_relationship_fields_names(schema) + if target_relationship_name not in relationships_names: 
+ msg = f"There is no relationship {target_relationship_name!r} defined in schema {schema.__name__!r}" + raise InvalidFilters(msg) def gather_relationships_info( - model: Type[TypeModel], - schema: Type[TypeSchema], - relationship_path: List[str], + model: type[TypeModel], + schema: type[TypeSchema], + relationship_path: list[str], collected_info: dict[RelationshipPath, RelationshipFilteringInfo], target_relationship_idx: int = 0, - prev_aliased_model: Optional[Any] = None, + prev_aliased_model: Any | None = None, ) -> dict[RelationshipPath, RelationshipFilteringInfo]: is_last_relationship = target_relationship_idx == len(relationship_path) - 1 target_relationship_path = RELATIONSHIP_SPLITTER.join( relationship_path[: target_relationship_idx + 1], ) target_relationship_name = relationship_path[target_relationship_idx] - - if target_relationship_name not in set(get_relationships(schema)): - msg = f"There are no relationship '{target_relationship_name}' defined in schema {schema.__name__}" - raise InvalidFilters(msg) - - target_schema = schema.model_fields[target_relationship_name].annotation + validate_relationship_name( + schema=schema, + target_relationship_name=target_relationship_name, + ) + target_schema = get_schema_from_field_annotation(schema.model_fields[target_relationship_name]) target_model = getattr(model, target_relationship_name).property.mapper.class_ if prev_aliased_model: @@ -376,9 +375,9 @@ def gather_relationships_info( def gather_relationships( - entrypoint_model: Type[TypeModel], - schema: Type[TypeSchema], - relationship_paths: Set[str], + entrypoint_model: type[TypeModel], + schema: type[TypeSchema], + relationship_paths: set[str], ) -> dict[RelationshipPath, RelationshipFilteringInfo]: collected_info = {} for relationship_path in sorted(relationship_paths): @@ -393,8 +392,8 @@ def gather_relationships( def prepare_relationships_info( - model: Type[TypeModel], - schema: Type[TypeSchema], + model: type[TypeModel], + schema: type[TypeSchema], 
filter_info: list, ): # TODO: do this on application startup or use the cache @@ -407,11 +406,11 @@ def prepare_relationships_info( def build_terminal_node_filter_expressions( - filter_item: Dict, - target_schema: Type[TypeSchema], - target_model: Type[TypeModel], - relationships_info: Dict[RelationshipPath, RelationshipFilteringInfo], -): + filter_item: dict, + target_schema: type[TypeSchema], + target_model: type[TypeModel], + relationships_info: dict[RelationshipPath, RelationshipFilteringInfo], +) -> BinaryExpression: name: str = filter_item["name"] if is_relationship_filter(name): *relationship_path, field_name = name.split(RELATIONSHIP_SPLITTER) @@ -435,12 +434,13 @@ def build_terminal_node_filter_expressions( schema_field = target_schema.model_fields[field_name] filter_operator = filter_item["op"] - custom_filter_expression: Callable = get_custom_filter_expression_callable( + custom_filter_sql: CustomFilterSQL | None = get_custom_filter_expression_callable( schema_field=schema_field, operator=filter_operator, ) - if custom_filter_expression is None: + if custom_filter_sql is None: return build_filter_expression( + field_name=field_name, schema_field=schema_field, model_column=model_column, operator=get_operator( @@ -450,40 +450,20 @@ def build_terminal_node_filter_expressions( value=filter_item["val"], ) - custom_call_result = custom_filter_expression( + return custom_filter_sql.get_expression( schema_field=schema_field, model_column=model_column, value=filter_item["val"], operator=filter_operator, ) - if isinstance(custom_call_result, Sequence): - expected_len = 2 - if len(custom_call_result) != expected_len: - log.error( - "Invalid filter, returned sequence length is not %s: %s, len=%s", - expected_len, - custom_call_result, - len(custom_call_result), - ) - raise InvalidFilters(detail="Custom sql filter backend error.") - log.warning( - "Custom filter result of `[expr, [joins]]` is deprecated." - " Please return only filter expression from now on. 
" - "(triggered on schema field %s for filter operator %s on column %s)", - schema_field, - filter_operator, - model_column, - ) - custom_call_result = custom_call_result[0] - return custom_call_result def build_filter_expressions( - filter_item: Dict, - target_schema: Type[TypeSchema], - target_model: Type[TypeModel], - relationships_info: Dict[RelationshipPath, RelationshipFilteringInfo], -) -> Union[BinaryExpression, BooleanClauseList]: + filter_item: dict, + target_schema: type[TypeSchema], + target_model: type[TypeModel], + relationships_info: dict[RelationshipPath, RelationshipFilteringInfo], +) -> BinaryExpression | BooleanClauseList: """ Return sqla expressions. @@ -532,24 +512,23 @@ def build_filter_expressions( ), ) - expressions = [] - for filter_sub_item in filter_item[logic_operator]: - expressions.append( + return op( + *( build_filter_expressions( filter_item=filter_sub_item, target_schema=target_schema, target_model=target_model, relationships_info=relationships_info, - ), - ) - - return op(*expressions) + ) + for filter_sub_item in filter_item[logic_operator] + ), + ) def create_filters_and_joins( filter_info: list, - model: Type[TypeModel], - schema: Type[TypeSchema], + model: type[TypeModel], + schema: type[TypeSchema], ): relationships_info = prepare_relationships_info( model=model, diff --git a/fastapi_jsonapi/data_layers/filtering/tortoise_operation.py b/fastapi_jsonapi/data_layers/filtering/tortoise_operation.py index 83266c1b..06b5c77c 100644 --- a/fastapi_jsonapi/data_layers/filtering/tortoise_operation.py +++ b/fastapi_jsonapi/data_layers/filtering/tortoise_operation.py @@ -1,6 +1,7 @@ """ Previously used: '__' """ + from typing import Protocol @@ -13,7 +14,7 @@ def add_suffix(field_name: str, suffix: str, sep: str = "__") -> str: :param sep: :return: """ - return "".join((field_name, sep, suffix)) + return f"{field_name}{sep}{suffix}" def type_op_any(field_name: str, type_op: str) -> str: @@ -292,8 +293,7 @@ def type_op_ilike(field_name: 
str, type_op: str) -> str: class ProcessTypeOperationFieldName(Protocol): - def __call__(self, field_name: str, type_op: str) -> str: - ... + def __call__(self, field_name: str, type_op: str) -> str: ... filters_dict: dict[str, ProcessTypeOperationFieldName] = { diff --git a/fastapi_jsonapi/data_layers/filtering/tortoise_orm.py b/fastapi_jsonapi/data_layers/filtering/tortoise_orm.py index 74b2d8d1..e804fe16 100644 --- a/fastapi_jsonapi/data_layers/filtering/tortoise_orm.py +++ b/fastapi_jsonapi/data_layers/filtering/tortoise_orm.py @@ -1,13 +1,9 @@ """Tortoise filters creator.""" +from __future__ import annotations + from typing import ( Any, - Dict, - List, - Optional, - Tuple, - Type, - Union, ) from pydantic import BaseModel @@ -15,7 +11,6 @@ from tortoise.expressions import Q from tortoise.queryset import QuerySet -from fastapi_jsonapi.data_layers.fields.enum import Enum from fastapi_jsonapi.data_layers.filtering.tortoise_operation import prepare_field_name_for_filtering from fastapi_jsonapi.data_layers.orm import DBORMOperandType from fastapi_jsonapi.data_typing import TypeModel @@ -24,7 +19,7 @@ from fastapi_jsonapi.querystring import QueryStringManager -def prepare_filter_pair(field: Type[ModelField], field_name: str, type_op: str, value: Any) -> Tuple: +def prepare_filter_pair(field: type[ModelField], field_name: str, type_op: str, value: Any) -> tuple: """Prepare filter.""" name_field_q: str = prepare_field_name_for_filtering(field_name, type_op) return name_field_q, value @@ -34,18 +29,17 @@ class FilterTortoiseORM: def __init__(self, model: TypeModel): self.model = model - def create_query(self, filter_q: Union[tuple, Q]) -> Q: + def create_query(self, filter_q: tuple | Q) -> Q: """Tortoise filter creation.""" if isinstance(filter_q, tuple): return Q(**{filter_q[0]: filter_q[1]}) - else: - return Q(filter_q) + return Q(filter_q) def orm_and_or( self, op: DBORMOperandType, filters: list, - ) -> Union[None, QuerySet, Dict[str, Union[QuerySet, 
List[QuerySet]]]]: + ) -> None | QuerySet | dict[str, QuerySet | list[QuerySet]]: """Filter for query to ORM.""" if not filters: return None @@ -73,9 +67,9 @@ def orm_and_or( def filter_converter( self, - schema: Type[BaseModel], + schema: type[BaseModel], filters: Filters, - ) -> List: + ) -> list: """ Make a list with filters, which can be used in the tortoise filter. @@ -84,17 +78,17 @@ def filter_converter( :return: list of filters, prepared for use in tortoise model. :raises InvalidFilters: if the filter was created with an error. """ - converted_filters: List = [] + converted_filters: list = [] for i_filter in filters: if "or" in i_filter: result = self.filter_converter(schema, i_filter["or"]) converted_filters.append(self.orm_and_or(DBORMOperandType.or_, result)) continue - elif "and" in i_filter: + if "and" in i_filter: result = self.filter_converter(schema, i_filter["and"]) converted_filters.append(self.orm_and_or(DBORMOperandType.and_, result)) continue - elif "not" in i_filter: + if "not" in i_filter: result = self.filter_converter(schema, [i_filter["not"]]) converted_filters.append(self.orm_and_or(DBORMOperandType.not_, result)) continue @@ -113,7 +107,7 @@ def filter_converter( ) converted_filters.append(result) else: - val: Union[List[Any], Any] + val: list[Any] | Any field: ModelField = schema.model_fields[name_field] if isinstance(i_filter["val"], list) and field.annotation is not list: val = self._validate(i_filter, field) @@ -129,7 +123,7 @@ def filter_converter( async def json_api_filter( self, query, - schema: Type[BaseModel], + schema: type[BaseModel], query_params: QueryStringManager, ) -> QuerySet: """Make queries with filtering from request.""" @@ -143,11 +137,9 @@ async def json_api_filter( def val_to_query(self, val: Any) -> Any: """Value to query.""" - if isinstance(val, Enum): - val = val.value return val - def _validate(self, json_api_filter: Dict[str, List[str]], model_filed: ModelField) -> List: + def _validate(self, json_api_filter: 
dict[str, list[str]], model_filed: ModelField) -> list: val = [] for i_v in json_api_filter["val"]: i_val, errors = model_filed.validate(i_v, {}, loc=model_filed.alias) @@ -157,7 +149,10 @@ def _validate(self, json_api_filter: Dict[str, List[str]], model_filed: ModelFie val.append(i_val) return val - def validate(self, filter_q: Union[None, Q, Dict[str, Union[Q, List[Q]]]]) -> Optional[Q]: + def validate( + self, + filter_q: None | Q | dict[str, Q | list[Q]], + ) -> Q | None: """ Tortoise filter validation. @@ -167,8 +162,7 @@ def validate(self, filter_q: Union[None, Q, Dict[str, Union[Q, List[Q]]]]) -> Op """ if isinstance(filter_q, Q): return Q(filter_q) - elif filter_q is None: + if filter_q is None: return None - else: - msg = "An unexpected argument for Q (result_filter={type})".format(type=type(filter_q)) - raise QueryError(msg) + msg = f"An unexpected argument for Q (result_filter={type(filter_q)})" + raise QueryError(msg) diff --git a/fastapi_jsonapi/data_layers/node_shared.py b/fastapi_jsonapi/data_layers/node_shared.py new file mode 100644 index 00000000..28355bdc --- /dev/null +++ b/fastapi_jsonapi/data_layers/node_shared.py @@ -0,0 +1,169 @@ +from __future__ import annotations + +from typing import Any + +from pydantic.fields import FieldInfo +from sqlalchemy.orm import ( + DeclarativeMeta, + InstrumentedAttribute, + aliased, +) +from sqlalchemy.sql.elements import BinaryExpression + +from fastapi_jsonapi.data_typing import ( + TypeModel, + TypeSchema, +) +from fastapi_jsonapi.exceptions import ( + InvalidFilters, + InvalidSort, +) +from fastapi_jsonapi.schema import ( + get_model_field, + get_relationship_fields_names, + get_schema_from_field_annotation, +) +from fastapi_jsonapi.splitter import SPLIT_REL +from fastapi_jsonapi.utils.sqla import get_related_model_cls + +Sort = BinaryExpression +Join = list[Any] +SortAndJoins = tuple[ + Sort, + list[Join], +] + + +class Node: + """Helper to recursively create sorts with sqlalchemy according to sort 
querystring parameter""" + + def __init__(self, model: type[TypeModel], sort_: dict, schema: type[TypeSchema]): + """ + Initialize an instance of a filter node. + + :params model: an sqlalchemy model. + :params sort_: sorts information of the current node and deeper nodes. + :param schema: the serializer of the resource. + """ + self.model = model + self.sort_ = sort_ + self.schema = schema + + @classmethod + def create_sort(cls, schema_field: FieldInfo, model_column, order: str): + """ + Create sqlalchemy sort. + + :params schema_field: + :params model_column: column sqlalchemy + :params order: desc | asc (or custom) + :return: + """ + """ + Custom sqlachemy sorting logic can be created in a marshmallow field for any field + You can override existing ('asc', 'desc') or create new - then follow this pattern: + `__sql_sort_`. This method has to accept following params: + * marshmallow_field - marshmallow field instance + * model_column - sqlalchemy column instance + """ + try: + f = getattr(schema_field, f"_{order}_sql_sort_") + except AttributeError: + pass + else: + return f( + schema_field=schema_field, + model_column=model_column, + ) + return getattr(model_column, order)() + + def resolve(self) -> SortAndJoins: + """Create sort for a particular node of the sort tree.""" + field = self.sort_.get("field", "") + if not hasattr(self.model, field) and SPLIT_REL not in field: + msg = f"{self.model.__name__} has no attribute {field}" + raise InvalidSort(msg) + + if SPLIT_REL in field: + value = {"field": SPLIT_REL.join(field.split(SPLIT_REL)[1:]), "order": self.sort_["order"]} + alias = aliased(self.related_model) + joins = [[alias, self.column]] + node = Node(alias, value, self.related_schema) + filters, new_joins = node.resolve() + joins.extend(new_joins) + return filters, joins + + return ( + self.create_sort( + schema_field=self.schema.model_fields[self.name].annotation, + model_column=self.column, + order=self.sort_["order"], + ), + [], + ) + + @property + def 
name(self) -> str: + """ + Return the name of the node or raise a BadRequest exception + + :return str: the name of the sort to sort on + """ + name = self.sort_.get("field") + + if name is None: + msg = "Can't find name of a sort" + raise InvalidFilters(msg) + + if SPLIT_REL in name: + name = name.split(SPLIT_REL)[0] + + if name not in self.schema.model_fields: + msg = f"{self.schema.__name__} has no attribute {name}" + raise InvalidFilters(msg) + + return name + + @property + def column(self) -> InstrumentedAttribute: + """ + Get the column object. + + :return: the column to filter on + """ + field = self.name + + model_field = get_model_field(self.schema, field) + + try: + return getattr(self.model, model_field) + except AttributeError: + msg = f"{self.model.__name__} has no attribute {model_field}" + raise InvalidFilters(msg) + + def validate_field_relationship(self, relationship_field: str) -> None: + if relationship_field not in get_relationship_fields_names(self.schema): + msg = f"{self.schema.__name__!r} has no relationship attribute {relationship_field!r}" + raise InvalidFilters(msg) + + @property + def related_model(self) -> DeclarativeMeta: + """ + Get the related model of a relationship field. + + :return: the related model. + """ + relationship_field = self.name + self.validate_field_relationship(relationship_field) + return get_related_model_cls(self.model, get_model_field(self.schema, relationship_field)) + + @property + def related_schema(self) -> type[TypeSchema]: + """ + Get the related schema of a relationship field. 
+ + :return: the related schema + """ + relationship_field = self.name + self.validate_field_relationship(relationship_field) + return get_schema_from_field_annotation(self.schema.model_fields[relationship_field]) diff --git a/fastapi_jsonapi/data_layers/orm.py b/fastapi_jsonapi/data_layers/orm.py index 0ffee4bd..468a3299 100644 --- a/fastapi_jsonapi/data_layers/orm.py +++ b/fastapi_jsonapi/data_layers/orm.py @@ -1,9 +1,7 @@ -"""ORM types enums.""" +from enum import Enum -from fastapi_jsonapi.data_layers.fields.enum import Enum - -class DBORMOperandType(str, Enum): +class DBORMOperandType(str, Enum): # noqa: SLOT000 or_ = "or" and_ = "and" not_ = "not" diff --git a/fastapi_jsonapi/data_layers/shared.py b/fastapi_jsonapi/data_layers/shared.py index 113359f4..de6ae37f 100644 --- a/fastapi_jsonapi/data_layers/shared.py +++ b/fastapi_jsonapi/data_layers/shared.py @@ -1,17 +1,19 @@ -from typing import TYPE_CHECKING, Tuple, Type, Union +from __future__ import annotations + +from typing import TYPE_CHECKING from fastapi_jsonapi.data_typing import TypeModel, TypeSchema if TYPE_CHECKING: - from fastapi_jsonapi.data_layers.filtering.sqlalchemy import Node as NodeSQLAlchemy + from fastapi_jsonapi.data_layers.node_shared import Node as NodeSQLAlchemy def create_filters_or_sorts( - model: Type[TypeModel], - filter_or_sort_info: Union[list, dict], - class_node: Union[Type["NodeSQLAlchemy"]], - schema: Type[TypeSchema], -) -> Tuple: + model: type[TypeModel], + filter_or_sort_info: list | dict, + class_node: type[NodeSQLAlchemy], + schema: type[TypeSchema], +) -> tuple: """ Apply filters / sorts from filters / sorts information to base query diff --git a/fastapi_jsonapi/data_layers/sorting/sqlalchemy.py b/fastapi_jsonapi/data_layers/sorting/sqlalchemy.py index b801ca55..02ae8e3d 100644 --- a/fastapi_jsonapi/data_layers/sorting/sqlalchemy.py +++ b/fastapi_jsonapi/data_layers/sorting/sqlalchemy.py @@ -1,27 +1,13 @@ """Helper to create sqlalchemy sortings according to filter 
querystring parameter""" -from typing import Any, List, Tuple, Type, Union -from pydantic.fields import FieldInfo -from sqlalchemy.orm import DeclarativeMeta, InstrumentedAttribute, aliased -from sqlalchemy.sql.elements import BinaryExpression +from __future__ import annotations +from fastapi_jsonapi.data_layers.node_shared import Node from fastapi_jsonapi.data_layers.shared import create_filters_or_sorts from fastapi_jsonapi.data_typing import TypeModel, TypeSchema -from fastapi_jsonapi.exceptions import InvalidFilters, InvalidSort -from fastapi_jsonapi.schema import get_model_field, get_relationships -from fastapi_jsonapi.splitter import SPLIT_REL -from fastapi_jsonapi.utils.sqla import get_related_model_cls -Sort = BinaryExpression -Join = List[Any] -SortAndJoins = Tuple[ - Sort, - List[Join], -] - - -def create_sorts(model: Type[TypeModel], filter_info: Union[list, dict], schema: Type[TypeSchema]): +def create_sorts(model: type[TypeModel], filter_info: list | dict, schema: type[TypeSchema]): """ Apply filters from filters information to base query. @@ -30,143 +16,3 @@ def create_sorts(model: Type[TypeModel], filter_info: Union[list, dict], schema: :params schema: the resource. """ return create_filters_or_sorts(model, filter_info, Node, schema) - - -class Node(object): - """Helper to recursively create sorts with sqlalchemy according to sort querystring parameter""" - - def __init__(self, model: Type[TypeModel], sort_: dict, schema: Type[TypeSchema]): - """ - Initialize an instance of a filter node. - - :params model: an sqlalchemy model. - :params sort_: sorts information of the current node and deeper nodes. - :param schema: the serializer of the resource. - """ - self.model = model - self.sort_ = sort_ - self.schema = schema - - @classmethod - def create_sort(cls, schema_field: FieldInfo, model_column, order: str): - """ - Create sqlalchemy sort. 
- - :params schema_field: - :params model_column: column sqlalchemy - :params order: desc | asc (or custom) - :return: - """ - """ - Custom sqlachemy sorting logic can be created in a marshmallow field for any field - You can override existing ('asc', 'desc') or create new - then follow this pattern: - `__sql_sort_`. This method has to accept following params: - * marshmallow_field - marshmallow field instance - * model_column - sqlalchemy column instance - """ - try: - f = getattr(schema_field, f"_{order}_sql_sort_") - except AttributeError: - pass - else: - return f( - schema_field=schema_field, - model_column=model_column, - ) - return getattr(model_column, order)() - - def resolve(self) -> SortAndJoins: - """ - Create sort for a particular node of the sort tree. - """ - field = self.sort_.get("field", "") - if not hasattr(self.model, field) and SPLIT_REL not in field: - msg = "{} has no attribute {}".format(self.model.__name__, field) - raise InvalidSort(msg) - - if SPLIT_REL in field: - value = {"field": SPLIT_REL.join(field.split(SPLIT_REL)[1:]), "order": self.sort_["order"]} - alias = aliased(self.related_model) - joins = [[alias, self.column]] - node = Node(alias, value, self.related_schema) - filters, new_joins = node.resolve() - joins.extend(new_joins) - return filters, joins - - return ( - self.create_sort( - schema_field=self.schema.model_fields[self.name].annotation, - model_column=self.column, - order=self.sort_["order"], - ), - [], - ) - - @property - def name(self) -> str: - """ - Return the name of the node or raise a BadRequest exception - - :return str: the name of the sort to sort on - """ - name = self.sort_.get("field") - - if name is None: - msg = "Can't find name of a sort" - raise InvalidFilters(msg) - - if SPLIT_REL in name: - name = name.split(SPLIT_REL)[0] - - if name not in self.schema.model_fields: - msg = "{} has no attribute {}".format(self.schema.__name__, name) - raise InvalidFilters(msg) - - return name - - @property - def 
column(self) -> InstrumentedAttribute: - """ - Get the column object. - - :return: the column to filter on - """ - field = self.name - - model_field = get_model_field(self.schema, field) - - try: - return getattr(self.model, model_field) - except AttributeError: - msg = "{} has no attribute {}".format(self.model.__name__, model_field) - raise InvalidFilters(msg) - - @property - def related_model(self) -> DeclarativeMeta: - """ - Get the related model of a relationship field. - - :return: the related model. - """ - relationship_field = self.name - - if relationship_field not in get_relationships(self.schema): - msg = "{} has no relationship attribute {}".format(self.schema.__name__, relationship_field) - raise InvalidFilters(msg) - - return get_related_model_cls(self.model, get_model_field(self.schema, relationship_field)) - - @property - def related_schema(self) -> Type[TypeSchema]: - """ - Get the related schema of a relationship field. - - :return: the related schema - """ - relationship_field = self.name - - if relationship_field not in get_relationships(self.schema): - msg = "{} has no relationship attribute {}".format(self.schema.__name__, relationship_field) - raise InvalidFilters(msg) - - return self.schema.model_fields[relationship_field].annotation diff --git a/fastapi_jsonapi/data_layers/sorting/tortoise_orm.py b/fastapi_jsonapi/data_layers/sorting/tortoise_orm.py index 59f43f7c..a02e3ee9 100644 --- a/fastapi_jsonapi/data_layers/sorting/tortoise_orm.py +++ b/fastapi_jsonapi/data_layers/sorting/tortoise_orm.py @@ -1,5 +1,3 @@ -from typing import Dict, List - from tortoise.queryset import QuerySet @@ -8,7 +6,7 @@ class SortTortoiseORM: def sort( cls, query: QuerySet, - query_params_sorting: List[Dict[str, str]], + query_params_sorting: list[dict[str, str]], default_sort: str = "", ) -> QuerySet: """ diff --git a/fastapi_jsonapi/data_layers/sqla_orm.py b/fastapi_jsonapi/data_layers/sqla_orm.py index 233e9b81..f2cfb5b5 100644 --- 
a/fastapi_jsonapi/data_layers/sqla_orm.py +++ b/fastapi_jsonapi/data_layers/sqla_orm.py @@ -1,17 +1,19 @@ """This module is a CRUD interface between resource managers and the sqlalchemy ORM""" + +from __future__ import annotations + import logging -from typing import TYPE_CHECKING, Any, Iterable, List, Literal, Optional, Tuple, Type, Union +from typing import TYPE_CHECKING, Any, Literal, Union from sqlalchemy import delete, func, select from sqlalchemy.exc import DBAPIError, IntegrityError, MissingGreenlet, NoResultFound -from sqlalchemy.ext.asyncio import AsyncSession, AsyncSessionTransaction from sqlalchemy.inspection import inspect from sqlalchemy.orm import joinedload, selectinload -from sqlalchemy.orm.attributes import InstrumentedAttribute from sqlalchemy.orm.collections import InstrumentedList from sqlalchemy.sql import column, distinct from fastapi_jsonapi import BadRequest +from fastapi_jsonapi.common import get_relationship_info_from_field_metadata from fastapi_jsonapi.data_layers.base import BaseDataLayer from fastapi_jsonapi.data_layers.filtering.sqlalchemy import ( create_filters_and_joins, @@ -26,22 +28,28 @@ RelatedObjectNotFound, RelationNotFound, ) -from fastapi_jsonapi.querystring import PaginationQueryStringManager, QueryStringManager from fastapi_jsonapi.schema import ( BaseJSONAPIItemInSchema, BaseJSONAPIRelationshipDataToManySchema, BaseJSONAPIRelationshipDataToOneSchema, + JSONAPISchemaIntrospectionError, get_model_field, get_related_schema, ) -from fastapi_jsonapi.schema_base import RelationshipInfo from fastapi_jsonapi.splitter import SPLIT_REL from fastapi_jsonapi.utils.sqla import get_related_model_cls if TYPE_CHECKING: + from collections.abc import Iterable + from pydantic import BaseModel as PydanticBaseModel + from sqlalchemy.ext.asyncio import AsyncSession, AsyncSessionTransaction + from sqlalchemy.orm.attributes import InstrumentedAttribute from sqlalchemy.sql import Select + from fastapi_jsonapi.querystring import 
PaginationQueryStringManager, QueryStringManager + from fastapi_jsonapi.types_metadata import RelationshipInfo + log = logging.getLogger(__name__) ModelTypeOneOrMany = Union[TypeModel, list[TypeModel]] @@ -53,15 +61,16 @@ class SqlalchemyDataLayer(BaseDataLayer): def __init__( self, - schema: Type[TypeSchema], - model: Type[TypeModel], + schema: type[TypeSchema], + model: type[TypeModel], session: AsyncSession, + *, disable_collection_count: bool = False, default_collection_count: int = -1, - id_name_field: Optional[str] = None, + id_name_field: str | None = None, url_id_field: str = "id", eagerload_includes: bool = True, - query: Optional["Select"] = None, + query: Select | None = None, auto_convert_id_to_column_type: bool = True, **kwargs: Any, ): @@ -93,9 +102,12 @@ def __init__( self.eagerload_includes_ = eagerload_includes self._query = query self.auto_convert_id_to_column_type = auto_convert_id_to_column_type - self.transaction: Optional[AsyncSessionTransaction] = None + self.transaction: AsyncSessionTransaction | None = None - async def atomic_start(self, previous_dl: Optional["SqlalchemyDataLayer"] = None): + async def atomic_start( + self, + previous_dl: SqlalchemyDataLayer | None = None, + ) -> None: self.is_atomic = True if previous_dl: self.session = previous_dl.session @@ -106,7 +118,12 @@ async def atomic_start(self, previous_dl: Optional["SqlalchemyDataLayer"] = None self.transaction = self.session.begin() await self.transaction.start() - async def atomic_end(self, success: bool = True): + async def atomic_end( + self, + *, + success: bool = True, + exception: Exception | None = None, + ): if success: await self.transaction.commit() else: @@ -141,8 +158,8 @@ async def link_relationship_object( self, obj: TypeModel, relation_name: str, - related_data: Optional[ModelTypeOneOrMany], - action_trigger: ActionTrigger, + related_data: ModelTypeOneOrMany | None, + action_trigger: ActionTrigger, # noqa: ARG002 ): """ Links target object with relationship 
object or objects @@ -177,11 +194,8 @@ async def get_related_data_to_link( self, related_model: TypeModel, relationship_info: RelationshipInfo, - relationship_in: Union[ - BaseJSONAPIRelationshipDataToOneSchema, - BaseJSONAPIRelationshipDataToManySchema, - ], - ) -> Optional[ModelTypeOneOrMany]: + relationship_in: BaseJSONAPIRelationshipDataToOneSchema | BaseJSONAPIRelationshipDataToManySchema, + ) -> ModelTypeOneOrMany | None: """ Retrieves object or objects to link from database @@ -221,7 +235,7 @@ async def apply_relationships( :param action_trigger: indicates which one operation triggered relationships applying :return: """ - relationships: "PydanticBaseModel" = data_create.relationships + relationships: PydanticBaseModel = data_create.relationships if relationships is None: return @@ -237,14 +251,9 @@ async def apply_relationships( log.warning("field for %s in schema %s not found", relation_name, self.schema.__name__) continue - if "relationship" not in field.json_schema_extra: - log.warning( - "relationship info for %s in schema %s extra not found", - relation_name, - self.schema.__name__, - ) + relationship_info: RelationshipInfo | None = get_relationship_info_from_field_metadata(field) + if relationship_info is None: continue - relationship_info: RelationshipInfo = field.json_schema_extra["relationship"] related_model = get_related_model_cls(type(obj), relation_name) related_data = await self.get_related_data_to_link( related_model=related_model, @@ -300,7 +309,7 @@ def get_object_id_field_name(self): """ return self.id_name_field or inspect(self.model).primary_key[0].key - async def get_object(self, view_kwargs: dict, qs: Optional[QueryStringManager] = None) -> TypeModel: + async def get_object(self, view_kwargs: dict, qs: QueryStringManager | None = None) -> TypeModel: """ Retrieve an object through sqlalchemy. 
@@ -331,7 +340,7 @@ async def get_object(self, view_kwargs: dict, qs: Optional[QueryStringManager] = return obj - async def get_collection_count(self, query: "Select", qs: QueryStringManager, view_kwargs: dict) -> int: + async def get_collection_count(self, query: Select, qs: QueryStringManager, view_kwargs: dict) -> int: """ Returns number of elements for this collection @@ -346,7 +355,7 @@ async def get_collection_count(self, query: "Select", qs: QueryStringManager, vi count_query = select(func.count(distinct(column("id")))).select_from(query.subquery()) return (await self.session.execute(count_query)).scalar_one() - async def get_collection(self, qs: QueryStringManager, view_kwargs: Optional[dict] = None) -> Tuple[int, list]: + async def get_collection(self, qs: QueryStringManager, view_kwargs: dict | None = None) -> tuple[int, list]: """ Retrieve a collection of objects through sqlalchemy. @@ -406,6 +415,7 @@ async def update_object( # TODO: get field alias (if present) and get attribute by alias (rarely used, but required) if (old_value := getattr(obj, field_name, missing)) is missing: + # TODO: tests coverage log.warning("No field %r on %s. 
Make sure schema conforms model.", field_name, type(obj)) continue @@ -429,7 +439,7 @@ async def update_object( await self.session.rollback() err_message = f"Got an error {e.__class__.__name__} during updating obj {view_kwargs} data in DB" - log.error(err_message, exc_info=e) + log.exception(err_message, exc_info=e) raise InternalServerError( detail=err_message, @@ -461,7 +471,7 @@ async def delete_object(self, obj: TypeModel, view_kwargs: dict): await self.session.rollback() err_message = f"Got an error {e.__class__.__name__} deleting object {view_kwargs}" - log.error(err_message, exc_info=e) + log.exception(err_message, exc_info=e) raise InternalServerError( detail=err_message, @@ -474,9 +484,9 @@ async def delete_object(self, obj: TypeModel, view_kwargs: dict): await self.after_delete_object(obj, view_kwargs) - async def delete_objects(self, objects: List[TypeModel], view_kwargs: dict): + async def delete_objects(self, objects: list[TypeModel], view_kwargs: dict): await self.before_delete_objects(objects, view_kwargs) - query = delete(self.model).filter(self.model.id.in_((obj.id for obj in objects))) + query = delete(self.model).filter(self.model.id.in_(obj.id for obj in objects)) try: await self.session.execute(query) @@ -505,7 +515,6 @@ async def create_relationship( :param view_kwargs: kwargs from the resource view. :return: True if relationship have changed else False. """ - pass async def get_relationship( self, @@ -513,7 +522,7 @@ async def get_relationship( related_type_: str, related_id_field: str, view_kwargs: dict, - ) -> Tuple[Any, Any]: + ) -> tuple[Any, Any]: """ Get a relationship. 
@@ -555,8 +564,7 @@ async def get_relationship( if isinstance(related_objects, InstrumentedList): return obj, [{"type": related_type_, "id": getattr(obj_, related_id_field)} for obj_ in related_objects] - else: - return obj, {"type": related_type_, "id": getattr(related_objects, related_id_field)} + return obj, {"type": related_type_, "id": getattr(related_objects, related_id_field)} async def update_relationship( self, @@ -566,7 +574,6 @@ async def update_relationship( view_kwargs: dict, ) -> bool: """ - Update a relationship :param json_data: the request params. @@ -594,8 +601,8 @@ async def delete_relationship( def get_related_model_query_base( self, - related_model: Type[TypeModel], - ) -> "Select": + related_model: type[TypeModel], + ) -> Select: """ Prepare sql query (statement) to fetch related model @@ -606,29 +613,29 @@ def get_related_model_query_base( def get_related_object_query( self, - related_model: Type[TypeModel], + related_model: type[TypeModel], related_id_field: str, id_value: str, ): id_field = getattr(related_model, related_id_field) id_value = self.prepare_id_value(id_field, id_value) - stmt: "Select" = self.get_related_model_query_base(related_model) + stmt: Select = self.get_related_model_query_base(related_model) return stmt.where(id_field == id_value) def get_related_objects_list_query( self, - related_model: Type[TypeModel], + related_model: type[TypeModel], related_id_field: str, ids: list[str], - ) -> Tuple["Select", list[str]]: + ) -> tuple[Select, list[str]]: id_field = getattr(related_model, related_id_field) prepared_ids = [self.prepare_id_value(id_field, _id) for _id in ids] - stmt: "Select" = self.get_related_model_query_base(related_model) + stmt: Select = self.get_related_model_query_base(related_model) return stmt.where(id_field.in_(prepared_ids)), prepared_ids async def get_related_object( self, - related_model: Type[TypeModel], + related_model: type[TypeModel], related_id_field: str, id_value: str, ) -> TypeModel: @@ -656,7 
+663,7 @@ async def get_related_object( async def get_related_objects_list( self, - related_model: Type[TypeModel], + related_model: type[TypeModel], related_id_field: str, ids: list[str], ) -> list[TypeModel]: @@ -687,7 +694,7 @@ async def get_related_objects_list( return list(related_objects) - def filter_query(self, query: "Select", filter_info: Optional[list]) -> "Select": + def filter_query(self, query: Select, filter_info: list | None) -> Select: """ Filter query according to jsonapi 1.0. @@ -709,7 +716,7 @@ def filter_query(self, query: "Select", filter_info: Optional[list]) -> "Select" return query - def sort_query(self, query: "Select", sort_info: list) -> "Select": + def sort_query(self, query: Select, sort_info: list) -> Select: """ Sort query according to jsonapi 1.0. @@ -725,7 +732,7 @@ def sort_query(self, query: "Select", sort_info: list) -> "Select": query = query.order_by(i_sort) return query - def paginate_query(self, query: "Select", paginate_info: PaginationQueryStringManager) -> "Select": + def paginate_query(self, query: Select, paginate_info: PaginationQueryStringManager) -> Select: """ Paginate query according to jsonapi 1.0. @@ -742,7 +749,7 @@ def paginate_query(self, query: "Select", paginate_info: PaginationQueryStringMa return query - def eagerload_includes(self, query: "Select", qs: QueryStringManager) -> "Select": + def eagerload_includes(self, query: Select, qs: QueryStringManager) -> Select: """ Use eagerload feature of sqlalchemy to optimize data retrieval for include querystring parameter. 
@@ -758,7 +765,7 @@ def eagerload_includes(self, query: "Select", qs: QueryStringManager) -> "Select for related_field_name in include.split(SPLIT_REL): try: field_name_to_load = get_model_field(current_schema, related_field_name) - except Exception as e: + except (JSONAPISchemaIntrospectionError, AttributeError) as e: raise InvalidInclude(str(e)) field_to_load: InstrumentedAttribute = getattr(current_model, field_name_to_load) @@ -785,7 +792,7 @@ def retrieve_object_query( view_kwargs: dict, filter_field: InstrumentedAttribute, filter_value: Any, - ) -> "Select": + ) -> Select: """ Build query to retrieve object. @@ -795,10 +802,10 @@ def retrieve_object_query( :return sqlalchemy query: a query from sqlalchemy """ value = self.prepare_id_value(filter_field, filter_value) - query: "Select" = self.query(view_kwargs).where(filter_field == value) + query: Select = self.query(view_kwargs).where(filter_field == value) return query - def query(self, view_kwargs: dict) -> "Select": + def query(self, view_kwargs: dict) -> Select: """ Construct the base query to retrieve wanted data. @@ -827,7 +834,6 @@ async def after_create_object(self, obj: TypeModel, model_kwargs: dict, view_kwa :param model_kwargs: the data validated by pydantic. :param view_kwargs: kwargs from the resource view. """ - pass async def before_get_object(self, view_kwargs: dict): """ @@ -835,7 +841,6 @@ async def before_get_object(self, view_kwargs: dict): :param view_kwargs: kwargs from the resource view. """ - pass async def after_get_object(self, obj: Any, view_kwargs: dict): """ @@ -844,7 +849,6 @@ async def after_get_object(self, obj: Any, view_kwargs: dict): :param obj: an object from data layer. :param view_kwargs: kwargs from the resource view. 
""" - pass async def before_get_collection(self, qs: QueryStringManager, view_kwargs: dict): """ @@ -853,7 +857,6 @@ async def before_get_collection(self, qs: QueryStringManager, view_kwargs: dict) :param qs: a querystring manager to retrieve information from url. :param view_kwargs: kwargs from the resource view. """ - pass async def after_get_collection(self, collection: Iterable, qs: QueryStringManager, view_kwargs: dict): """ @@ -873,7 +876,6 @@ async def before_update_object(self, obj: Any, model_kwargs: dict, view_kwargs: :param model_kwargs: the data validated by schemas. :param view_kwargs: kwargs from the resource view. """ - pass async def after_update_object(self, obj: Any, model_kwargs: dict, view_kwargs: dict): """ @@ -883,7 +885,6 @@ async def after_update_object(self, obj: Any, model_kwargs: dict, view_kwargs: d :param model_kwargs: the data validated by schemas. :param view_kwargs: kwargs from the resource view. """ - pass async def before_delete_object(self, obj: TypeModel, view_kwargs: dict): """ @@ -892,7 +893,6 @@ async def before_delete_object(self, obj: TypeModel, view_kwargs: dict): :param obj: an object from data layer. :param view_kwargs: kwargs from the resource view. """ - pass async def after_delete_object(self, obj: TypeModel, view_kwargs: dict): """ @@ -901,25 +901,22 @@ async def after_delete_object(self, obj: TypeModel, view_kwargs: dict): :param obj: an object from data layer. :param view_kwargs: kwargs from the resource view. """ - pass - async def before_delete_objects(self, objects: List[TypeModel], view_kwargs: dict): + async def before_delete_objects(self, objects: list[TypeModel], view_kwargs: dict): """ Make checks before deleting objects. :param objects: an object from data layer. :param view_kwargs: kwargs from the resource view. 
""" - pass - async def after_delete_objects(self, objects: List[TypeModel], view_kwargs: dict): + async def after_delete_objects(self, objects: list[TypeModel], view_kwargs: dict): """ Any actions after deleting objects. :param objects: an object from data layer. :param view_kwargs: kwargs from the resource view. """ - pass async def before_create_relationship( self, @@ -937,7 +934,6 @@ async def before_create_relationship( :param view_kwargs: kwargs from the resource view. :return boolean: True if relationship have changed else False. """ - pass async def after_create_relationship( self, @@ -959,7 +955,6 @@ async def after_create_relationship( :param view_kwargs: kwargs from the resource view. :return boolean: True if relationship have changed else False. """ - pass async def before_get_relationship( self, @@ -977,7 +972,6 @@ async def before_get_relationship( :param dict view_kwargs: kwargs from the resource view. :return tuple: the object and related object(s). """ - pass async def after_get_relationship( self, @@ -999,7 +993,6 @@ async def after_get_relationship( :param view_kwargs: kwargs from the resource view. :return tuple: the object and related object(s). """ - pass async def before_update_relationship( self, @@ -1017,7 +1010,6 @@ async def before_update_relationship( :param view_kwargs: kwargs from the resource view. :return boolean: True if relationship have changed else False. """ - pass async def after_update_relationship( self, @@ -1039,7 +1031,6 @@ async def after_update_relationship( :param view_kwargs: kwargs from the resource view. :return boolean: True if relationship have changed else False. """ - pass async def before_delete_relationship( self, @@ -1056,7 +1047,6 @@ async def before_delete_relationship( :param related_id_field: the identifier field of the related model. :param view_kwargs: kwargs from the resource view. 
""" - pass async def after_delete_relationship( self, @@ -1077,4 +1067,3 @@ async def after_delete_relationship( :param related_id_field: the identifier field of the related model. :param view_kwargs: kwargs from the resource view. """ - pass diff --git a/fastapi_jsonapi/data_layers/tortoise_orm.py b/fastapi_jsonapi/data_layers/tortoise_orm.py index def07871..216b3219 100644 --- a/fastapi_jsonapi/data_layers/tortoise_orm.py +++ b/fastapi_jsonapi/data_layers/tortoise_orm.py @@ -1,7 +1,11 @@ """This module is a CRUD interface between resource managers and the Tortoise ORM""" -from typing import Any, Iterable, Optional, Tuple, Type +from __future__ import annotations +from collections.abc import Iterable +from typing import Any + +from fastapi import Request from tortoise.queryset import QuerySet from fastapi_jsonapi.data_layers.base import BaseDataLayer @@ -17,13 +21,15 @@ class TortoiseDataLayer(BaseDataLayer): def __init__( self, - schema: Type[TypeSchema], - model: Type[TypeModel], + request: Request, + schema: type[TypeSchema], + model: type[TypeModel], + *, disable_collection_count: bool = False, default_collection_count: int = -1, - id_name_field: Optional[str] = None, + id_name_field: str | None = None, url_id_field: str = "id", - query: Optional[QuerySet] = None, + query: QuerySet | None = None, **kwargs: Any, ): """ @@ -38,6 +44,7 @@ def __init__( :param kwargs: initialization parameters of an TortoiseDataLayer instance """ super().__init__( + request=request, schema=schema, model=model, url_id_field=url_id_field, @@ -57,7 +64,7 @@ async def create_object(self, data_create: BaseJSONAPIItemInSchema, view_kwargs: :return DeclarativeMeta: an object """ - async def get_object(self, view_kwargs: dict, qs: Optional[QueryStringManager] = None) -> TypeModel: + async def get_object(self, view_kwargs: dict, qs: QueryStringManager | None = None) -> TypeModel: """ Retrieve an object @@ -80,7 +87,7 @@ async def get_collection_count(self, query: QuerySet) -> int: return 
await query.count() - async def get_collection(self, qs: QueryStringManager, view_kwargs: Optional[dict] = None) -> Tuple[int, list]: + async def get_collection(self, qs: QueryStringManager, view_kwargs: dict | None = None) -> tuple[int, list]: """ Retrieve a collection of objects through Tortoise. @@ -157,7 +164,7 @@ async def get_relationship( related_type_: str, related_id_field: str, view_kwargs: dict, - ) -> Tuple[Any, Any]: + ) -> tuple[Any, Any]: """ Get a relationship. @@ -203,7 +210,7 @@ async def delete_relationship( async def get_related_object( self, - related_model: Type[TypeModel], + related_model: type[TypeModel], related_id_field: str, id_value: str, ) -> TypeModel: @@ -257,7 +264,10 @@ def retrieve_object_query( :return Tortoise query: a query from Tortoise """ - def query(self, view_kwargs: dict) -> QuerySet: + def query( + self, + view_kwargs: dict, + ) -> QuerySet: """ Construct the base query to retrieve wanted data. @@ -305,7 +315,12 @@ async def before_get_collection(self, qs: QueryStringManager, view_kwargs: dict) :param view_kwargs: kwargs from the resource view. """ - async def after_get_collection(self, collection: Iterable, qs: QueryStringManager, view_kwargs: dict) -> Iterable: + async def after_get_collection( + self, + collection: Iterable, + qs: QueryStringManager, # noqa: ARG002 + view_kwargs: dict, # noqa: ARG002 + ) -> Iterable: """ Make work after to retrieve a collection of objects. 
diff --git a/fastapi_jsonapi/exceptions/base.py b/fastapi_jsonapi/exceptions/base.py index 058ce7f8..56262386 100644 --- a/fastapi_jsonapi/exceptions/base.py +++ b/fastapi_jsonapi/exceptions/base.py @@ -1,10 +1,9 @@ """Collection of useful http error for the Api.""" +from __future__ import annotations + from typing import ( Any, - Dict, - List, - Optional, ) from pydantic import Field @@ -14,15 +13,15 @@ class ExceptionSourceSchema(BaseModel): """Source exception schema.""" - parameter: Optional[str] = None - pointer: Optional[str] = None + parameter: str | None = None + pointer: str | None = None class ExceptionSchema(BaseModel): """Exception schema.""" status: str - source: Optional[ExceptionSourceSchema] = None + source: ExceptionSourceSchema | None = None title: str detail: Any = None @@ -30,8 +29,8 @@ class ExceptionSchema(BaseModel): class ExceptionResponseSchema(BaseModel): """Exception response schema.""" - errors: List[ExceptionSchema] - jsonapi: Dict[str, str] = Field(default={"version": "1.0"}) + errors: list[ExceptionSchema] + jsonapi: dict[str, str] = Field(default={"version": "1.0"}) class QueryError(Exception): diff --git a/fastapi_jsonapi/exceptions/handlers.py b/fastapi_jsonapi/exceptions/handlers.py index 043af0e5..dff4f864 100644 --- a/fastapi_jsonapi/exceptions/handlers.py +++ b/fastapi_jsonapi/exceptions/handlers.py @@ -4,7 +4,10 @@ from fastapi_jsonapi.exceptions import HTTPException -async def base_exception_handler(request: Request, exc: HTTPException): +async def base_exception_handler( + request: Request, + exc: HTTPException, +) -> JSONResponse: return JSONResponse( status_code=exc.status_code, content={"errors": [exc.as_dict]}, diff --git a/fastapi_jsonapi/exceptions/json_api.py b/fastapi_jsonapi/exceptions/json_api.py index 0fb548ba..99aa1389 100644 --- a/fastapi_jsonapi/exceptions/json_api.py +++ b/fastapi_jsonapi/exceptions/json_api.py @@ -1,11 +1,10 @@ """JSON API exceptions schemas.""" +from __future__ import annotations + from http 
import HTTPStatus from typing import ( Any, - List, - Optional, - Union, ) from fastapi import HTTPException as FastApiHttpException @@ -21,13 +20,13 @@ class HTTPException(FastApiHttpException): def __init__( self, - detail: Union[str, dict] = "", + detail: str | dict = "", pointer: str = "", parameter: str = "", - title: Optional[str] = None, - status_code: Optional[int] = None, - errors: Optional[List["HTTPException"]] = None, - meta: Optional[dict[str, Any]] = None, + title: str | None = None, + status_code: int | None = None, + errors: list[HTTPException] | None = None, + meta: dict[str, Any] | None = None, ): """ Init base HTTP exception. @@ -95,16 +94,6 @@ class InternalServerError(HTTPException): status_code = status.HTTP_500_INTERNAL_SERVER_ERROR -class UnsupportedFeatureORM(InternalServerError): - """ - Init for invalid ORM exception. - - Unsupported feature ORM exception class customized for json_api exceptions. - """ - - title = "Unsupported ORM" - - class BadRequest(HTTPException): """ Bad request HTTP exception class customized for json_api exceptions. 
diff --git a/fastapi_jsonapi/jsonapi_typing.py b/fastapi_jsonapi/jsonapi_typing.py index f29dc1f1..a29fb1ce 100644 --- a/fastapi_jsonapi/jsonapi_typing.py +++ b/fastapi_jsonapi/jsonapi_typing.py @@ -1,12 +1,10 @@ """JSON API types.""" from typing import ( - Dict, - List, Optional, Union, ) DictValueType = Union[str, int, float, dict, list] -Filters = List[Dict[str, Optional[DictValueType]]] -JsonParamsType = Dict[str, DictValueType] +Filters = list[dict[str, Optional[DictValueType]]] +JsonParamsType = dict[str, DictValueType] diff --git a/fastapi_jsonapi/querystring.py b/fastapi_jsonapi/querystring.py index eec7813e..349995ad 100644 --- a/fastapi_jsonapi/querystring.py +++ b/fastapi_jsonapi/querystring.py @@ -1,17 +1,20 @@ """Helper to deal with querystring parameters according to jsonapi specification.""" + +from __future__ import annotations + from collections import defaultdict from functools import cached_property from typing import ( TYPE_CHECKING, Any, - Dict, - List, - Optional, - Type, ) from urllib.parse import unquote -import simplejson as json +try: + import orjson as json +except ImportError: + import json + from fastapi import ( FastAPI, Request, @@ -33,7 +36,7 @@ ) from fastapi_jsonapi.schema import ( get_model_field, - get_relationships, + get_relationship_fields_names, ) from fastapi_jsonapi.splitter import SPLIT_REL @@ -48,10 +51,10 @@ class PaginationQueryStringManager(BaseModel): Contains info about offsets, sizes, number and limits of query with pagination. """ - offset: Optional[int] = None - size: Optional[int] = 25 + offset: int | None = None + size: int | None = 25 number: int = 1 - limit: Optional[int] = None + limit: int | None = None class HeadersQueryStringManager(BaseModel): @@ -61,13 +64,13 @@ class HeadersQueryStringManager(BaseModel): Contains info about request headers. 
""" - host: Optional[str] = None - connection: Optional[str] = None - accept: Optional[str] = None - user_agent: Optional[str] = Field(None, alias="user-agent") - referer: Optional[str] = None - accept_encoding: Optional[str] = Field(None, alias="accept-encoding") - accept_language: Optional[str] = Field(None, alias="accept-language") + host: str | None = None + connection: str | None = None + accept: str | None = None + user_agent: str | None = Field(None, alias="user-agent") + referer: str | None = None + accept_encoding: str | None = Field(None, alias="accept-encoding") + accept_language: str | None = Field(None, alias="accept-language") class QueryStringManager: @@ -84,22 +87,23 @@ def __init__(self, request: Request) -> None: self.request: Request = request self.app: FastAPI = request.app self.qs: QueryParams = request.query_params - self.config: Dict[str, Any] = getattr(self.app, "config", {}) + self.config: dict[str, Any] = getattr(self.app, "config", {}) self.ALLOW_DISABLE_PAGINATION: bool = self.config.get("ALLOW_DISABLE_PAGINATION", True) self.MAX_PAGE_SIZE: int = self.config.get("MAX_PAGE_SIZE", 10000) self.MAX_INCLUDE_DEPTH: int = self.config.get("MAX_INCLUDE_DEPTH", 3) self.headers: HeadersQueryStringManager = HeadersQueryStringManager(**dict(self.request.headers)) - def _extract_item_key(self, key: str) -> str: + @classmethod + def extract_item_key(cls, key: str) -> str: try: key_start = key.index("[") + 1 key_end = key.index("]") return key[key_start:key_end] - except Exception: + except (IndexError, ValueError): msg = "Parse error" raise BadRequest(msg, parameter=key) - def _get_unique_key_values(self, name: str) -> Dict[str, str]: + def _get_unique_key_values(self, name: str) -> dict[str, str]: """ Return a dict containing key / values items for a given key, used for items like filters, page, etc. 
@@ -114,12 +118,12 @@ def _get_unique_key_values(self, name: str) -> Dict[str, str]: if not key.startswith(name): continue - item_key = self._extract_item_key(key) + item_key = self.extract_item_key(key) results[item_key] = value return results - def _get_multiple_key_values(self, name: str) -> Dict[str, List]: + def _get_multiple_key_values(self, name: str) -> dict[str, list]: results = defaultdict(list) for raw_key, value in self.qs.multi_items(): @@ -127,18 +131,18 @@ def _get_multiple_key_values(self, name: str) -> Dict[str, List]: if not key.startswith(name): continue - item_key = self._extract_item_key(key) + item_key = self.extract_item_key(key) results[item_key].extend(value.split(",")) return results @classmethod - def _simple_filters(cls, dict_: Dict[str, Any]) -> List[Dict[str, Any]]: + def _simple_filters(cls, dict_: dict[str, Any]) -> list[dict[str, Any]]: """Filter creation.""" return [{"name": key, "op": "eq", "val": value} for (key, value) in dict_.items()] @property - def querystring(self) -> Dict[str, str]: + def querystring(self) -> dict[str, str]: """ Return original querystring but containing only managed keys. @@ -151,7 +155,7 @@ def querystring(self) -> Dict[str, str]: } @property - def filters(self) -> List[dict]: + def filters(self) -> list[dict]: """ Return filters from query string. @@ -199,7 +203,7 @@ def pagination(self) -> PaginationQueryStringManager: :raises BadRequest: if the client is not allowed to disable pagination. 
""" # check values type - pagination_data: Dict[str, str] = self._get_unique_key_values("page") + pagination_data: dict[str, str] = self._get_unique_key_values("page") pagination = PaginationQueryStringManager(**pagination_data) if pagination_data.get("size") is None: pagination.size = None @@ -236,25 +240,22 @@ def fields(self) -> dict[str, set[Any]]: msg = f"Application has no resource with type {resource_type!r}" raise InvalidType(msg) - schema: Type[BaseModel] = self._get_schema(resource_type) + schema: type[BaseModel] = self._get_schema(resource_type) for field_name in field_names: if field_name == "": continue - if field_name not in schema.__fields__: - msg = "{schema} has no attribute {field}".format( - schema=schema.__name__, - field=field_name, - ) + if field_name not in schema.model_fields: + msg = f"{schema.__name__} has no attribute {field_name}" raise InvalidField(msg) return {resource_type: set(field_names) for resource_type, field_names in fields.items()} - def _get_schema(self, resource_type: str) -> Type[BaseModel]: - return RoutersJSONAPI.all_jsonapi_routers[resource_type]._schema + def _get_schema(self, resource_type: str) -> type[BaseModel]: + return RoutersJSONAPI.all_jsonapi_routers[resource_type].schema - def get_sorts(self, schema: Type["TypeSchema"]) -> List[Dict[str, str]]: + def get_sorts(self, schema: type[TypeSchema]) -> list[dict[str, str]]: """ Return fields to sort by including sort name for SQLAlchemy and row sort parameter for other ORMs. @@ -268,29 +269,28 @@ def get_sorts(self, schema: Type["TypeSchema"]) -> List[Dict[str, str]]: :raises InvalidSort: if sort field wrong. 
""" - if sort_q := self.qs.get("sort"): - sorting_results = [] - for sort_field in sort_q.split(","): - field = sort_field.replace("-", "") - if SPLIT_REL not in field: - if field not in schema.model_fields: - msg = "{schema} has no attribute {field}".format( - schema=schema.__name__, - field=field, - ) - raise InvalidSort(msg) - if field in get_relationships(schema): - msg = "You can't sort on {field} because it is a relationship field".format(field=field) - raise InvalidSort(msg) - field = get_model_field(schema, field) - order = "desc" if sort_field.startswith("-") else "asc" - sorting_results.append({"field": field, "order": order}) - return sorting_results - - return [] + if not (sort_q := self.qs.get("sort")): + return [] + + sorting_results = [] + relationships_fields_names = get_relationship_fields_names(schema) + + for sort_field in sort_q.split(","): + field = sort_field.replace("-", "") + if SPLIT_REL not in field: + if field not in schema.model_fields: + msg = f"{schema.__name__} has no attribute {field}" + raise InvalidSort(msg) + if field in relationships_fields_names: + msg = f"You can't sort by relationship field {field!r} on {schema.__name__!r}" + raise InvalidSort(msg) + field = get_model_field(schema, field) + order = "desc" if sort_field.startswith("-") else "asc" + sorting_results.append({"field": field, "order": order}) + return sorting_results @property - def include(self) -> List[str]: + def include(self) -> list[str]: """ Return fields to include. 
@@ -303,8 +303,6 @@ def include(self) -> List[str]: if self.MAX_INCLUDE_DEPTH is not None: for include_path in includes: if len(include_path.split(SPLIT_REL)) > self.MAX_INCLUDE_DEPTH: - msg = "You can't use include through more than {max_include_depth} relationships".format( - max_include_depth=self.MAX_INCLUDE_DEPTH, - ) + msg = f"You can't use include through more than {self.MAX_INCLUDE_DEPTH} relationships" raise InvalidInclude(msg) return includes diff --git a/fastapi_jsonapi/schema.py b/fastapi_jsonapi/schema.py index fd361f31..357e9d1d 100644 --- a/fastapi_jsonapi/schema.py +++ b/fastapi_jsonapi/schema.py @@ -1,26 +1,35 @@ -""" -Base JSON:API schemas. +from __future__ import annotations -Pydantic (for FastAPI). -""" +from inspect import isclass +from types import ( + GenericAlias, + UnionType, +) from typing import ( TYPE_CHECKING, - Dict, - List, - Optional, - Sequence, - Type, Union, + get_args, ) -from fastapi import FastAPI from pydantic import ( - BaseModel, + BaseModel as PydanticBaseModel, +) +from pydantic import ( ConfigDict, Field, ) +from pydantic._internal._typing_extra import is_none_type + +from fastapi_jsonapi.common import search_relationship_info +from fastapi_jsonapi.schema_base import BaseModel if TYPE_CHECKING: + # noinspection PyProtectedMember + from collections.abc import Sequence + + from fastapi import FastAPI + from pydantic.fields import FieldInfo + from fastapi_jsonapi.data_typing import TypeSchema @@ -36,7 +45,7 @@ class BaseJSONAPIRelationshipDataToOneSchema(BaseModel): class BaseJSONAPIRelationshipDataToManySchema(BaseModel): - data: List[BaseJSONAPIRelationshipSchema] + data: list[BaseJSONAPIRelationshipSchema] class BaseJSONAPIItemSchema(BaseModel): @@ -54,9 +63,9 @@ class BaseJSONAPIItemInSchema(BaseJSONAPIItemSchema): TODO PATCH: accept object id (maybe create a new separate schema) """ - attributes: "TypeSchema" = Field(description="Resource object attributes") - relationships: Optional["TypeSchema"] = Field(None, 
description="Resource object relationships") - id: Optional[str] = Field(None, description="Resource object ID") + attributes: TypeSchema = Field(description="Resource object attributes") + relationships: TypeSchema | None = Field(None, description="Resource object relationships") + id: str | None = Field(None, description="Resource object ID") class BaseJSONAPIDataInSchema(BaseModel): @@ -72,8 +81,8 @@ class BaseJSONAPIObjectSchema(BaseJSONAPIItemSchema): class JSONAPIResultListMetaSchema(BaseModel): """JSON:API list meta schema.""" - count: Optional[int] = None - total_pages: Optional[int] = Field(None, alias="totalPages") + count: int | None = None + total_pages: int | None = Field(None, alias="totalPages") model_config = ConfigDict( extra="forbid", populate_by_name=True, @@ -93,13 +102,15 @@ class JSONAPIDocumentObjectSchema(BaseModel): class JSONAPIObjectSchema(BaseJSONAPIObjectSchema): """JSON:API base object schema.""" + model_config = ConfigDict(from_attributes=True) + class BaseJSONAPIResultSchema(BaseModel): - """ - JSON:API Required fields schema - """ + """JSON:API Required fields schema""" + + model_config = ConfigDict(from_attributes=True) - meta: Optional[JSONAPIResultListMetaSchema] = Field(None, description="JSON:API metadata") + meta: JSONAPIResultListMetaSchema | None = Field(None, description="JSON:API metadata") jsonapi: JSONAPIDocumentObjectSchema = JSONAPIDocumentObjectSchema() @@ -116,8 +127,8 @@ class JSONAPIResultDetailSchema(BaseJSONAPIResultSchema): RelationshipInfoSchema = Union[ - Type[BaseJSONAPIRelationshipDataToOneSchema], - Type[BaseJSONAPIRelationshipDataToManySchema], + type[BaseJSONAPIRelationshipDataToOneSchema], + type[BaseJSONAPIRelationshipDataToManySchema], ] @@ -125,7 +136,7 @@ class JSONAPISchemaIntrospectionError(Exception): pass -def get_model_field(schema: Type["TypeSchema"], field: str) -> str: +def get_model_field(schema: type[TypeSchema], field: str) -> str: """ Get the model field of a schema field. 
@@ -144,36 +155,28 @@ class ComputerSchema(pydantic_base): :raises Exception: if the schema from parameter has no attribute for parameter. """ if schema.model_fields.get(field) is None: - msg = "{schema} has no attribute {field}".format( - schema=schema.__name__, - field=field, - ) + msg = f"{schema.__name__} has no attribute {field}" raise JSONAPISchemaIntrospectionError(msg) return field -def get_relationships(schema: Type["TypeSchema"], model_field: bool = False) -> List[str]: +def get_relationship_fields_names( + schema: type[TypeSchema], +) -> set[str]: """ Return relationship fields of a schema. :param schema: a schemas schema - :param model_field: list of relationship fields of a schema """ - relationships: List[str] = [] + names: set[str] = set() for i_name, i_type in schema.model_fields.items(): - try: - if issubclass(i_type.annotation, BaseModel): - relationships.append(i_name) - except TypeError: - pass + if search_relationship_info.first(i_type): + names.add(i_name) - if model_field is True: - relationships = [get_model_field(schema, key) for key in relationships] + return names - return relationships - -def get_schema_from_type(resource_type: str, app: FastAPI) -> Type[BaseModel]: +def get_schema_from_type(resource_type: str, app: FastAPI) -> type[BaseModel]: """ Retrieve a schema from the registry by his type. @@ -182,15 +185,38 @@ def get_schema_from_type(resource_type: str, app: FastAPI) -> Type[BaseModel]: :return Schema: the schema class. :raises Exception: if the schema not found for this resource type. 
""" - schemas: Dict[str, Type[BaseModel]] = getattr(app, "schemas", {}) + schemas: dict[str, type[BaseModel]] = getattr(app, "schemas", {}) try: return schemas[resource_type] except KeyError: - msg = "Couldn't find schema for type: {type}".format(type=resource_type) - raise Exception(msg) + msg = f"Couldn't find schema for type: {resource_type}" + raise ValueError(msg) + + +def get_schema_from_field_annotation(field: FieldInfo) -> type[BaseModel] | None: + """TODO: consider using pydantic's GenerateSchema ?""" + choices = [] + if isinstance(field.annotation, UnionType): + args = get_args(field.annotation) + choices.extend(args) + else: + choices.append(field.annotation) + while choices: + elem = choices.pop(0) + if isinstance(elem, GenericAlias): + choices.extend(get_args(elem)) + continue + + if is_none_type(elem): + continue + + if isclass(elem) and issubclass(elem, PydanticBaseModel): + return elem + + return None -def get_related_schema(schema: Type["TypeSchema"], field: str) -> Type["TypeSchema"]: +def get_related_schema(schema: type[TypeSchema], field: str) -> type[TypeSchema]: """ Retrieve the related schema of a relationship field. 
@@ -198,4 +224,4 @@ def get_related_schema(schema: Type["TypeSchema"], field: str) -> Type["TypeSche :params field: the relationship field :return: the related schema """ - return schema.model_fields[field].annotation + return get_schema_from_field_annotation(schema.model_fields[field]) diff --git a/fastapi_jsonapi/schema_base.py b/fastapi_jsonapi/schema_base.py index 4e4ccca7..bacad9eb 100644 --- a/fastapi_jsonapi/schema_base.py +++ b/fastapi_jsonapi/schema_base.py @@ -1,14 +1,9 @@ -from pydantic import ConfigDict - __all__ = ( "Field", "BaseModel", "registry", - "RelationshipInfo", ) -from typing import Dict - from pydantic import BaseModel as BaseModelGeneric from pydantic import Field @@ -39,14 +34,3 @@ def __init_subclass__(cls, **kwargs): class BaseModel(RegistryMeta): pass - - -class RelationshipInfo(BaseModel): - resource_type: str - many: bool = False - related_view: str = None - related_view_kwargs: Dict[str, str] = Field(default_factory=dict) - resource_id_example: str = "1" - id_field_name: str = "id" - - model_config = ConfigDict(frozen=True) diff --git a/fastapi_jsonapi/schema_builder.py b/fastapi_jsonapi/schema_builder.py index a01a8c83..c83d6b45 100644 --- a/fastapi_jsonapi/schema_builder.py +++ b/fastapi_jsonapi/schema_builder.py @@ -1,25 +1,35 @@ """JSON API schemas builder class.""" -from dataclasses import dataclass + +from __future__ import annotations + +import logging +from dataclasses import ( + dataclass, +) from typing import ( + TYPE_CHECKING, + Annotated, Any, Callable, ClassVar, - Dict, - Iterable, - List, - Optional, - Tuple, - Type, TypeVar, Union, ) import pydantic -from pydantic import BaseModel as PydanticBaseModel -from pydantic import ConfigDict -from pydantic.fields import FieldInfo +from pydantic import ( + AfterValidator, + BeforeValidator, + ConfigDict, +) +from pydantic import ( + BaseModel as PydanticBaseModel, +) -from fastapi_jsonapi.data_typing import TypeSchema +from fastapi_jsonapi.common import ( + 
get_relationship_info_from_field_metadata, + search_client_can_set_id, +) from fastapi_jsonapi.schema import ( BaseJSONAPIDataInSchema, BaseJSONAPIItemInSchema, @@ -31,28 +41,46 @@ JSONAPIResultDetailSchema, JSONAPIResultListSchema, RelationshipInfoSchema, + get_schema_from_field_annotation, ) -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo, registry -from fastapi_jsonapi.splitter import SPLIT_REL -from fastapi_jsonapi.validation_utils import ( - extract_field_validators, - extract_validators, +from fastapi_jsonapi.schema_base import ( + BaseModel, + Field, + registry, +) +from fastapi_jsonapi.schema_builder_dtos import ( + IncludedSchemaDTO, + ResourceIdFieldDTO, + SchemasInfoDTO, + BuiltSchemasDTO, ) +from fastapi_jsonapi.splitter import SPLIT_REL +from fastapi_jsonapi.types_metadata import RelationshipInfo +from fastapi_jsonapi.validation_utils import extract_validators + +if TYPE_CHECKING: + from collections.abc import Iterable + + from pydantic.fields import FieldInfo + -JSON_API_RESPONSE_TYPE = Dict[Union[int, str], Dict[str, Any]] +JSON_API_RESPONSE_TYPE = dict[int | str, dict[str, Any]] JSONAPIObjectSchemaType = TypeVar("JSONAPIObjectSchemaType", bound=PydanticBaseModel) not_passed = object() +log = logging.getLogger(__name__) -# todo: when 3.9 support is dropped, return back `slots=True to JSONAPIObjectSchemas dataclass` + +# TODO: check in runtime and update dataclass kwargs (slots) +# TODO: when 3.9 support is dropped, return back `slots=True to JSONAPIObjectSchemas dataclass` class FieldConfig: cast_type: Callable - def __init__(self, cast_type: Optional[Callable] = None): + def __init__(self, cast_type: Callable | None = None): self.cast_type = cast_type @@ -75,41 +103,17 @@ def get_field_config() -> FieldConfig: @dataclass(frozen=True) class JSONAPIObjectSchemas: - attributes_schema: Type[BaseModel] - relationships_schema: Type[BaseModel] - object_jsonapi_schema: Type[JSONAPIObjectSchema] - can_be_included_schemas: 
Dict[str, Type[JSONAPIObjectSchema]] + attributes_schema: type[BaseModel] + relationships_schema: type[BaseModel] + object_jsonapi_schema: type[JSONAPIObjectSchema] + can_be_included_schemas: dict[str, type[JSONAPIObjectSchema]] @property - def included_schemas_list(self) -> List[Type[JSONAPIObjectSchema]]: + def included_schemas_list(self) -> list[type[JSONAPIObjectSchema]]: return list(self.can_be_included_schemas.values()) -@dataclass(frozen=True) -class BuiltSchemasDTO: - schema_in_post: Type[BaseJSONAPIDataInSchema] - schema_in_post_data: Type[BaseJSONAPIItemInSchema] - schema_in_patch: Type[BaseJSONAPIDataInSchema] - schema_in_patch_data: Type[BaseJSONAPIItemInSchema] - detail_response_schema: Type[JSONAPIResultDetailSchema] - list_response_schema: Type[JSONAPIResultListSchema] - - -FieldValidators = Dict[str, Callable] - - -@dataclass(frozen=True) -class SchemasInfoDTO: - # id field - resource_id_field: Tuple[Type, FieldInfo, Callable, FieldValidators] - # pre-built attributes - attributes_schema: Type[BaseModel] - # relationships - relationships_schema: Type[BaseModel] - # has any required relationship - has_required_relationship: bool - # anything that can be included - included_schemas: List[Tuple[str, BaseModel, str]] +FieldValidators = dict[str, Callable] class SchemaBuilder: @@ -125,24 +129,19 @@ def __init__( ): self._resource_type = resource_type - def _create_schemas_objects_list(self, schema: Type[BaseModel]) -> Type[JSONAPIResultListSchema]: + def _create_schemas_objects_list(self, schema: type[BaseModel]) -> type[JSONAPIResultListSchema]: object_jsonapi_list_schema, list_jsonapi_schema = self.build_list_schemas(schema) - # TODO: do we need this `object_jsonapi_list_schema` field? 
it's not used anywhere 🤔 - # self.object_jsonapi_list_schema: Type[JSONAPIObjectSchema] = object_jsonapi_list_schema return list_jsonapi_schema - def _create_schemas_object_detail(self, schema: Type[BaseModel]) -> Type[JSONAPIResultDetailSchema]: + def _create_schemas_object_detail(self, schema: type[BaseModel]) -> type[JSONAPIResultDetailSchema]: object_jsonapi_detail_schema, detail_jsonapi_schema = self.build_detail_schemas(schema) - # TODO: do we need this `object_jsonapi_detail_schema` field? it's not used anywhere 🤔 - # self.object_jsonapi_detail_schema: Type[JSONAPIObjectSchema] = object_jsonapi_detail_schema - return detail_jsonapi_schema def create_schemas( self, - schema: Type[BaseModel], - schema_in_post: Optional[Type[BaseModel]] = None, - schema_in_patch: Optional[Type[BaseModel]] = None, + schema: type[BaseModel], + schema_in_post: type[BaseModel] | None = None, + schema_in_patch: type[BaseModel] | None = None, ) -> BuiltSchemasDTO: # TODO: generic? schema_in_post = schema_in_post or schema @@ -180,11 +179,12 @@ def create_schemas( def build_schema_in( self, - schema_in: Type[BaseModel], + schema_in: type[BaseModel], schema_name_suffix: str = "", + *, non_optional_relationships: bool = False, id_field_required: bool = False, - ) -> Tuple[Type[BaseJSONAPIDataInSchema], Type[BaseJSONAPIItemInSchema]]: + ) -> tuple[type[BaseJSONAPIDataInSchema], type[BaseJSONAPIItemInSchema]]: base_schema_name = schema_in.__name__.removesuffix("Schema") + schema_name_suffix dto = self._get_info_from_schema_for_building( @@ -196,9 +196,7 @@ def build_schema_in( object_jsonapi_schema = self._build_jsonapi_object( base_name=base_schema_name, resource_type=self._resource_type, - attributes_schema=dto.attributes_schema, - relationships_schema=dto.relationships_schema, - resource_id_field=dto.resource_id_field, + schemas_info_dto=dto, includes=not_passed, model_base=BaseJSONAPIItemInSchema, relationships_required=dto.has_required_relationship, @@ -216,7 +214,7 @@ def 
build_schema_in( def _build_schema( self, base_name: str, - schema: Type[BaseModel], + schema: type[BaseModel], builder: Callable, includes: Iterable[str] = not_passed, ): @@ -236,9 +234,9 @@ def _build_schema( def build_detail_schemas( self, - schema: Type[BaseModel], + schema: type[BaseModel], includes: Iterable[str] = not_passed, - ) -> Tuple[Type[JSONAPIObjectSchema], Type[JSONAPIResultDetailSchema]]: + ) -> tuple[type[JSONAPIObjectSchema], type[JSONAPIResultDetailSchema]]: return self._build_schema( base_name=f"{schema.__name__}Detail", schema=schema, @@ -248,9 +246,9 @@ def build_detail_schemas( def build_list_schemas( self, - schema: Type[BaseModel], + schema: type[BaseModel], includes: Iterable[str] = not_passed, - ) -> Tuple[Type[JSONAPIObjectSchema], Type[JSONAPIResultListSchema]]: + ) -> tuple[type[JSONAPIObjectSchema], type[JSONAPIResultListSchema]]: return self._build_schema( base_name=f"{schema.__name__}List", schema=schema, @@ -258,71 +256,100 @@ def build_list_schemas( includes=includes, ) + @classmethod + def _annotation_with_validators(cls, field: FieldInfo) -> type: + annotation = field.annotation + validators = [] + for val in field.metadata: + if isinstance(val, BeforeValidator | AfterValidator): + validators.append(val) + + if validators: + annotation = Annotated[annotation, *validators] + + return annotation + def _get_info_from_schema_for_building( self, base_name: str, - schema: Type[BaseModel], + schema: type[BaseModel], includes: Iterable[str] = not_passed, + *, non_optional_relationships: bool = False, ) -> SchemasInfoDTO: attributes_schema_fields = {} relationships_schema_fields = {} - included_schemas: List[Tuple[str, Optional[type], str]] = [] + included_schemas: list[IncludedSchemaDTO] = [] has_required_relationship = False - resource_id_field = (str, Field(None), None, {}) + resource_id_field = ResourceIdFieldDTO(field_type=str) + # required! 
otherwise we get ForwardRef + schema.model_rebuild(_types_namespace=registry.schemas) + # TODO: can schema.model_fields be empty? + # annotation for schema to have `model_fields` for name, field in (schema.model_fields or {}).items(): - if field.json_schema_extra and isinstance(field.json_schema_extra.get("relationship"), RelationshipInfo): + if relationship_info := get_relationship_info_from_field_metadata(field): if includes is not_passed: pass elif name not in includes: # if includes are passed, skip this if name not present! continue - relationship: RelationshipInfo = field.json_schema_extra.get("relationship") relationship_schema = self.create_relationship_data_schema( field_name=name, base_name=base_name, field=field, - relationship_info=relationship, + relationship_info=relationship_info, ) - # TODO: xxx - # is there a way to read that the field type is Optional? (r.n. it's ForwardRef) - # consider field is not required until is marked required explicitly (`default=...` means required) - field_marked_required = field.is_required() is True + field_marked_required = field.is_required() relationship_field = ... 
if (non_optional_relationships and field_marked_required) else None if relationship_field is not None: has_required_relationship = True relationships_schema_fields[name] = (relationship_schema, relationship_field) # works both for to-one and to-many - included_schemas.append((name, field.annotation, relationship.resource_type)) - elif field.json_schema_extra and name == "id": - id_validators = extract_field_validators( - schema, + related_schema = get_schema_from_field_annotation(field) + if related_schema: + included_schemas.append( + IncludedSchemaDTO( + name=name, + related_schema=related_schema, + related_resource_type=relationship_info.resource_type, + ), + ) + else: + log.warning("Could not find related schema in field %s", field) + elif name == "id": + id_validators = extract_validators( + model=schema, include_for_field_names={"id"}, ) - resource_id_field = (*(resource_id_field[:-1]), id_validators) + resource_id_field.validators = id_validators - if field.json_schema_extra and not field.json_schema_extra.get("client_can_set_id", False): + if not (can_set_id := search_client_can_set_id.first(field)): continue - # todo: support for union types? 
- # support custom cast func - resource_id_field = (str, Field(**field.json_schema_extra), field.annotation, id_validators) + resource_id_field.field_type = self._annotation_with_validators(field=field) + resource_id_field.client_can_set_id = can_set_id else: - attributes_schema_fields[name] = (field.annotation, field.default) - ConfigOrmMode = ConfigDict(from_attributes=True) + annotation = self._annotation_with_validators(field=field) + attributes_schema_fields[name] = (annotation, field.default) + + model_config = ConfigDict(from_attributes=True) + extracted_validators = extract_validators( + model=schema, + exclude_for_field_names={"id"}, + ) attributes_schema = pydantic.create_model( f"{base_name}AttributesJSONAPI", **attributes_schema_fields, - __config__=ConfigOrmMode, - __validators__=extract_validators(schema, exclude_for_field_names={"id"}), + __config__=model_config, + __validators__=extracted_validators, ) relationships_schema = pydantic.create_model( f"{base_name}RelationshipsJSONAPI", **relationships_schema_fields, - __config__=ConfigOrmMode, + __config__=model_config, ) return SchemasInfoDTO( @@ -333,26 +360,32 @@ def _get_info_from_schema_for_building( included_schemas=included_schemas, ) + @classmethod def create_relationship_schema( - self, + cls, name: str, relationship_info: RelationshipInfo, - ) -> Type[BaseJSONAPIRelationshipSchema]: + ) -> type[BaseJSONAPIRelationshipSchema]: # TODO: cache? 
 if name.endswith("s"): # plural to single name = name[:-1] schema_name = f"{name}RelationshipJSONAPI" - relationship_schema = pydantic.create_model( + return pydantic.create_model( schema_name, - id=(str, Field(..., description="Resource object id", example=relationship_info.resource_id_example)), + id=( + str, + Field( + ..., + description="Resource object id", + json_schema_extra=dict(example=relationship_info.resource_id_example), + ), + ), type=(str, Field(default=relationship_info.resource_type, description="Resource type")), __base__=BaseJSONAPIRelationshipSchema, ) - return relationship_schema - def create_relationship_data_schema( self, field_name: str, @@ -372,8 +405,10 @@ def create_relationship_data_schema( ) base = BaseJSONAPIRelationshipDataToOneSchema if relationship_info.many: - relationship_schema = List[relationship_schema] + relationship_schema = list[relationship_schema] base = BaseJSONAPIRelationshipDataToManySchema + elif not field.is_required(): + relationship_schema = relationship_schema | None relationship_data_schema = pydantic.create_model( f"{schema_name}RelationshipDataJSONAPI", @@ -384,50 +419,74 @@ def create_relationship_data_schema( self.relationship_schema_cache[cache_key] = relationship_data_schema return relationship_data_schema + @classmethod + def _build_relationships_schema_definition( + cls, + relationships_required: bool, + relationships_schema: type[BaseModel], + ) -> tuple: + default = ... 
+ if not relationships_required: + default = None + relationships_schema = relationships_schema | None + field_definition = (relationships_schema | None, default) + return field_definition + + @classmethod + def _build_object_jsonapi_schema_fields( + cls, + attributes_schema, + resource_id_field: ResourceIdFieldDTO, + id_field_required: bool, + resource_type: str, + ) -> dict: + id_type = resource_id_field.field_type + + if resource_id_field.client_can_set_id: + id_type = Annotated[id_type, resource_id_field.client_can_set_id] + + object_jsonapi_schema_fields = {} + object_jsonapi_schema_fields.update( + id=(id_type, Field(... if id_field_required else None)), + attributes=(attributes_schema, ...), + type=(str, Field(default=resource_type, description="Resource type")), + ) + return object_jsonapi_schema_fields + def _build_jsonapi_object( self, base_name: str, resource_type: str, - attributes_schema: Type[TypeSchema], - relationships_schema: Type[TypeSchema], + schemas_info_dto: SchemasInfoDTO, includes, - resource_id_field: Tuple[Type, FieldInfo, Callable, FieldValidators], - model_base: Type[JSONAPIObjectSchemaType] = JSONAPIObjectSchema, + model_base: type[JSONAPIObjectSchemaType] = JSONAPIObjectSchema, + *, use_schema_cache: bool = True, relationships_required: bool = False, id_field_required: bool = False, - ) -> Type[JSONAPIObjectSchemaType]: + ) -> type[JSONAPIObjectSchemaType]: if use_schema_cache and base_name in self.base_jsonapi_object_schemas_cache: return self.base_jsonapi_object_schemas_cache[base_name] - field_type, field_info, id_cast_func, id_validators = resource_id_field - # FIXME: Почему-то сюда прилетает NoneType и из-за этого не запускаются примеры - try: - id_field_kw = { - **field_info.json_schema_extra, - } - except TypeError: - id_field_kw = {} - if id_cast_func: - id_field_kw.update( - field_config=TransferSaveWrapper(field_config=FieldConfig(cast_type=id_cast_func)), - ) - - object_jsonapi_schema_fields = { - "attributes": 
(attributes_schema, ...), - "id": (str, Field(... if id_field_required else None, **id_field_kw)), - # "id": (str, Field(... if id_field_required else None)), - } + # TODO: pass all decorator infos for whole schema for attributes schema + object_jsonapi_schema_fields = self._build_object_jsonapi_schema_fields( + attributes_schema=schemas_info_dto.attributes_schema, + resource_id_field=schemas_info_dto.resource_id_field, + id_field_required=id_field_required, + resource_type=resource_type or self._resource_type, + ) if includes: object_jsonapi_schema_fields.update( - relationships=(relationships_schema, (... if relationships_required else None)), + relationships=self._build_relationships_schema_definition( + relationships_required=relationships_required, + relationships_schema=schemas_info_dto.relationships_schema, + ), ) object_jsonapi_schema = pydantic.create_model( f"{base_name}ObjectJSONAPI", **object_jsonapi_schema_fields, - type=(str, Field(default=resource_type or self._resource_type, description="Resource type")), - __validators__=id_validators, + __validators__=schemas_info_dto.resource_id_field.validators, __base__=model_base, ) @@ -438,29 +497,43 @@ def _build_jsonapi_object( def find_all_included_schemas( self, - schema: Type[BaseModel], + schema: type[BaseModel], resource_type: str, includes: Iterable[str], - included_schemas: List[Tuple[str, BaseModel, str]], - ) -> Dict[str, Type[JSONAPIObjectSchema]]: + included_schemas: list[IncludedSchemaDTO], + ) -> dict[str, type[JSONAPIObjectSchema]]: if includes is not_passed: return { # prepare same object schema # TODO: caches?! 
- name: self.create_jsonapi_object_schemas( - included_schema, - resource_type=resource_type, + i.name: self.create_jsonapi_object_schemas( + i.related_schema, + resource_type=i.related_resource_type, ).object_jsonapi_schema - for (name, included_schema, resource_type) in included_schemas + for i in included_schemas } - can_be_included_schemas = {} + return self.find_all_included_schemas_from_annotations( + schema=schema, + resource_type=resource_type, + includes=includes, + ) + + def find_all_included_schemas_from_annotations( + self, + schema: type[BaseModel], + resource_type: str, + includes: Iterable[str], + ) -> dict[str, type[JSONAPIObjectSchema]]: + can_be_included_schemas: dict[str, type[JSONAPIObjectSchema]] = {} for i_include in includes: current_schema = schema - relations_list: List[str] = i_include.split(SPLIT_REL) + relations_list: list[str] = i_include.split(SPLIT_REL) for part_index, include_part in enumerate(relations_list, start=1): # find nested from the Schema - nested_schema: Type[BaseModel] = current_schema.model_fields[include_part].annotation + nested_schema = get_schema_from_field_annotation(current_schema.model_fields[include_part]) + # TODO: ? continue or raise? 
probably should be already checked + assert nested_schema is not None # find all relations for this one nested_schema_includes = set(relations_list[: part_index - 1] + relations_list[part_index:]) related_jsonapi_object_schema = self.create_jsonapi_object_schemas( @@ -478,31 +551,21 @@ def find_all_included_schemas( return can_be_included_schemas - @staticmethod - def string_to_schema(schema) -> Type[BaseModel]: - import importlib - - module_class_str = (str(schema).strip("[]").split("["))[-1] - module_name, class_name = module_class_str.rsplit(".", 1) - module = importlib.import_module(module_name) - class_obj = getattr(module, class_name) - schema = class_obj - return schema - def create_jsonapi_object_schemas( self, - schema: Type[BaseModel], + schema: type[BaseModel], includes: Iterable[str] = not_passed, - resource_type: Optional[str] = None, + resource_type: str | None = None, base_name: str = "", + *, compute_included_schemas: bool = False, use_schema_cache: bool = True, ) -> JSONAPIObjectSchemas: + # TODO: more caching (how? for each includes...) + if use_schema_cache and schema in self.object_schemas_cache and includes is not_passed: return self.object_schemas_cache[schema] - schema = self.string_to_schema(schema) if not hasattr(schema, "model_rebuild") else schema - schema.model_rebuild(_types_namespace=registry.schemas) base_name = base_name or schema.__name__ if includes is not not_passed: @@ -517,12 +580,11 @@ def create_jsonapi_object_schemas( object_jsonapi_schema = self._build_jsonapi_object( base_name=base_name, resource_type=resource_type, - attributes_schema=dto.attributes_schema, - relationships_schema=dto.relationships_schema, - resource_id_field=dto.resource_id_field, + schemas_info_dto=dto, includes=includes, use_schema_cache=use_schema_cache, # pass has_required_relationship ? 
+ relationships_required=False, ) can_be_included_schemas = {} @@ -547,23 +609,22 @@ def create_jsonapi_object_schemas( def build_schema_for_list_result( self, name: str, - object_jsonapi_schema: Type[JSONAPIObjectSchema], - includes_schemas: List[Type[JSONAPIObjectSchema]], - ) -> Type[JSONAPIResultListSchema]: + object_jsonapi_schema: type[JSONAPIObjectSchema], + includes_schemas: list[type[JSONAPIObjectSchema]], + ) -> type[JSONAPIResultListSchema]: return self.build_schema_for_result( name=f"{name}JSONAPI", base=JSONAPIResultListSchema, - data_type=List[object_jsonapi_schema], + data_type=list[object_jsonapi_schema], includes_schemas=includes_schemas, ) def build_schema_for_detail_result( self, name: str, - object_jsonapi_schema: Type[JSONAPIObjectSchema], - includes_schemas: List[Type[JSONAPIObjectSchema]], - ) -> Type[JSONAPIResultDetailSchema]: - # return detail_jsonapi_schema + object_jsonapi_schema: type[JSONAPIObjectSchema], + includes_schemas: list[type[JSONAPIObjectSchema]], + ) -> type[JSONAPIResultDetailSchema]: return self.build_schema_for_result( name=f"{name}JSONAPI", base=JSONAPIResultDetailSchema, @@ -574,28 +635,28 @@ def build_schema_for_detail_result( def build_schema_for_result( self, name: str, - base: Type[BaseJSONAPIResultSchema], - data_type: Union[Type[JSONAPIObjectSchema], Type[List[JSONAPIObjectSchema]]], - includes_schemas: List[Type[JSONAPIObjectSchema]], - ) -> Union[Type[JSONAPIResultListSchema], Type[JSONAPIResultDetailSchema]]: + base: type[BaseJSONAPIResultSchema], + data_type: type[JSONAPIObjectSchema | list[JSONAPIObjectSchema]], + includes_schemas: list[type[JSONAPIObjectSchema]], + ) -> type[JSONAPIResultListSchema | JSONAPIResultDetailSchema]: included_schema_annotation = Union[JSONAPIObjectSchema] for includes_schema in includes_schemas: included_schema_annotation = Union[included_schema_annotation, includes_schema] - schema_fields = { - "data": (data_type, ...), - } + schema_fields = {} + schema_fields.update( + 
data=(data_type, ...), + ) if includes_schemas: schema_fields.update( included=( - List[included_schema_annotation], + list[included_schema_annotation], Field(None), ), ) - result_jsonapi_schema = pydantic.create_model( + return pydantic.create_model( name, **schema_fields, __base__=base, ) - return result_jsonapi_schema diff --git a/fastapi_jsonapi/schema_builder_dtos.py b/fastapi_jsonapi/schema_builder_dtos.py new file mode 100644 index 00000000..1d2e028a --- /dev/null +++ b/fastapi_jsonapi/schema_builder_dtos.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +from dataclasses import dataclass, field as dataclass_field +from typing import Any + +from fastapi_jsonapi.schema import ( + BaseJSONAPIDataInSchema, + BaseJSONAPIItemInSchema, + JSONAPIResultDetailSchema, + JSONAPIResultListSchema, +) +from fastapi_jsonapi.schema_base import BaseModel + + +@dataclass(frozen=True, slots=True) +class IncludedSchemaDTO: + # (name, related_schema, relationship_info.resource_type) + name: str + related_schema: type[BaseModel] + related_resource_type: str + + +@dataclass(frozen=False, slots=True) +class ResourceIdFieldDTO: + field_type: type + client_can_set_id: bool = False + validators: dict[str, classmethod[Any, Any, Any]] = dataclass_field(default_factory=dict) + + +@dataclass(frozen=True, slots=True) +class SchemasInfoDTO: + # id field + resource_id_field: ResourceIdFieldDTO + # pre-built attributes + attributes_schema: type[BaseModel] + # relationships + relationships_schema: type[BaseModel] + # has any required relationship + has_required_relationship: bool + # anything that can be included + included_schemas: list[IncludedSchemaDTO] + + +@dataclass(frozen=True) +class BuiltSchemasDTO: + schema_in_post: type[BaseJSONAPIDataInSchema] + schema_in_post_data: type[BaseJSONAPIItemInSchema] + schema_in_patch: type[BaseJSONAPIDataInSchema] + schema_in_patch_data: type[BaseJSONAPIItemInSchema] + detail_response_schema: type[JSONAPIResultDetailSchema] + 
list_response_schema: type[JSONAPIResultListSchema] diff --git a/fastapi_jsonapi/signature.py b/fastapi_jsonapi/signature.py index 6fea993f..d7967e0e 100644 --- a/fastapi_jsonapi/signature.py +++ b/fastapi_jsonapi/signature.py @@ -1,45 +1,75 @@ -"""Functions for extracting and updating signatures.""" import inspect import logging from enum import Enum from inspect import Parameter from typing import ( + TYPE_CHECKING, Optional, - Type, ) from fastapi import Query -from pydantic import Field -from fastapi_jsonapi.schema_base import BaseModel, registry +from fastapi_jsonapi.common import get_relationship_info_from_field_metadata +from fastapi_jsonapi.schema_base import ( + BaseModel, +) + +if TYPE_CHECKING: + # noinspection PyProtectedMember + from pydantic.fields import FieldInfo + log = logging.getLogger(__name__) -def create_filter_parameter(name: str, field: Field) -> Parameter: - if hasattr(field, "sub_fields") and field.sub_fields: - default = Query(None, alias="filter[{alias}]".format(alias=field.alias)) - type_field = field.annotation - elif ( +def create_filter_parameter( + name: str, + field: "FieldInfo", +) -> Parameter: + filter_alias = field.alias or name + query_filter_name = f"filter[{filter_alias}]" + if ( inspect.isclass(field.annotation) and issubclass(field.annotation, Enum) and hasattr(field.annotation, "values") ): - default = Query(None, alias="filter[{alias}]".format(alias=field.alias), enum=list(field.annotation)) + # TODO: enum handling? what if is optional? + default = Query( + None, + alias=query_filter_name, + # TODO: read from annotation or somehow else? 
+ enum=list(field.annotation), + ) type_field = str else: - default = Query(None, alias="filter[{alias}]".format(alias=field.alias)) + default = Query(None, alias=query_filter_name) type_field = field.annotation return Parameter( - name, + name=name, kind=Parameter.POSITIONAL_OR_KEYWORD, annotation=Optional[type_field], default=default, ) -def create_additional_query_params(schema: Optional[Type[BaseModel]]) -> tuple[list[Parameter], list[Parameter]]: +def get_param_for_includes( + includes_names: list[str], +) -> Parameter: + doc_available_includes = "\n".join([f"* `{name}`" for name in includes_names]) + return Parameter( + "_jsonapi_include", + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=Optional[str], + default=Query( + ",".join(includes_names), + alias="include", + description=f"Available includes:\n {doc_available_includes}", + ), + ) + + +def create_additional_query_params(schema: type[BaseModel]) -> tuple[list[Parameter], list[Parameter]]: filter_params = [] include_params = [] if not schema: @@ -47,43 +77,18 @@ def create_additional_query_params(schema: Optional[Type[BaseModel]]) -> tuple[l available_includes_names = [] - # TODO! ? - schema.model_rebuild(_types_namespace=registry.schemas) - for name, field in (schema.model_fields or {}).items(): - try: - try: - if field.json_schema_extra.get("relationship"): - available_includes_names.append(name) - continue - else: - log.warning( - " found nested schema %s for field %r. Consider marking it as relationship", - field, - name, - ) - except AttributeError: - pass - - # create filter params + # TODO: annotation? why `model_fields` is underlined in PyCharm? 
+ for name, field in schema.model_fields.items(): + if get_relationship_info_from_field_metadata(field): + available_includes_names.append(name) + else: parameter = create_filter_parameter( name=name, field=field, ) filter_params.append(parameter) - except Exception as ex: - log.warning("could not create filter for field %s %s", name, field, exc_info=ex) if available_includes_names: - doc_available_includes = "\n".join([f"* `{name}`" for name in available_includes_names]) - include_param = Parameter( - "_jsonapi_include", - kind=Parameter.POSITIONAL_OR_KEYWORD, - annotation=Optional[str], - default=Query( - ",".join(available_includes_names), - alias="include", - description=f"Available includes:\n {doc_available_includes}", - ), - ) + include_param = get_param_for_includes(available_includes_names) include_params.append(include_param) return filter_params, include_params diff --git a/fastapi_jsonapi/types_metadata/__init__.py b/fastapi_jsonapi/types_metadata/__init__.py new file mode 100644 index 00000000..9cfa8172 --- /dev/null +++ b/fastapi_jsonapi/types_metadata/__init__.py @@ -0,0 +1,8 @@ +__all__ = ( + "ClientCanSetId", + "CustomFilterSQL", + "RelationshipInfo", +) +from .client_can_set_id import ClientCanSetId +from .custom_filter_sql import CustomFilterSQL +from .relationship_info import RelationshipInfo diff --git a/fastapi_jsonapi/types_metadata/client_can_set_id.py b/fastapi_jsonapi/types_metadata/client_can_set_id.py new file mode 100644 index 00000000..2dd7d002 --- /dev/null +++ b/fastapi_jsonapi/types_metadata/client_can_set_id.py @@ -0,0 +1,12 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import ( + Any, + Callable, +) + + +@dataclass(frozen=True) +class ClientCanSetId: + cast_type: Callable[[Any], Any] | None = None diff --git a/fastapi_jsonapi/types_metadata/custom_filter_sql.py b/fastapi_jsonapi/types_metadata/custom_filter_sql.py new file mode 100644 index 00000000..5400e58c --- /dev/null +++ 
b/fastapi_jsonapi/types_metadata/custom_filter_sql.py @@ -0,0 +1,28 @@ +from dataclasses import dataclass +from typing import ( + TYPE_CHECKING, + Generic, + TypeVar, +) + +if TYPE_CHECKING: + # noinspection PyProtectedMember + from pydantic.fields import FieldInfo + + +ColumnType = TypeVar("ColumnType") +ExpressionType = TypeVar("ExpressionType") + + +@dataclass(frozen=True) +class CustomFilterSQL(Generic[ColumnType, ExpressionType]): + op: str + + def get_expression( + self, + schema_field: "FieldInfo", + model_column: ColumnType, + value: str, + operator: str, + ) -> ExpressionType: + raise NotImplementedError diff --git a/fastapi_jsonapi/types_metadata/relationship_info.py b/fastapi_jsonapi/types_metadata/relationship_info.py new file mode 100644 index 00000000..4b25aa50 --- /dev/null +++ b/fastapi_jsonapi/types_metadata/relationship_info.py @@ -0,0 +1,9 @@ +from dataclasses import dataclass + + +@dataclass(frozen=True) +class RelationshipInfo: + resource_type: str + many: bool = False + resource_id_example: str = "1" + id_field_name: str = "id" diff --git a/fastapi_jsonapi/utils/__init__.py b/fastapi_jsonapi/utils/__init__.py index e69de29b..95b0bdd0 100644 --- a/fastapi_jsonapi/utils/__init__.py +++ b/fastapi_jsonapi/utils/__init__.py @@ -0,0 +1,7 @@ +__all__ = ( + "check_can_be_none", + "logical_xor", +) + +from .logical import logical_xor +from .none_checker import check_can_be_none diff --git a/fastapi_jsonapi/utils/dependency_helper.py b/fastapi_jsonapi/utils/dependency_helper.py index b41a3fd4..2c496a17 100644 --- a/fastapi_jsonapi/utils/dependency_helper.py +++ b/fastapi_jsonapi/utils/dependency_helper.py @@ -1,8 +1,8 @@ import inspect +from collections.abc import Awaitable from contextlib import AsyncExitStack from typing import ( Any, - Awaitable, Callable, TypeVar, Union, @@ -44,7 +44,7 @@ async def solve_dependencies_and_run(self, dependant: Dependant) -> ReturnType: if errors: raise RequestValidationError(errors, body=body) - orig_func: 
Callable[..., FuncReturnType[Any]] = dependant.call # type: ignore + orig_func: Callable[..., FuncReturnType[Any]] = dependant.call if inspect.iscoroutinefunction(orig_func): function_call_result = await orig_func(**values) else: diff --git a/fastapi_jsonapi/utils/logical.py b/fastapi_jsonapi/utils/logical.py new file mode 100644 index 00000000..6c5865d6 --- /dev/null +++ b/fastapi_jsonapi/utils/logical.py @@ -0,0 +1,5 @@ +from typing import Any + + +def logical_xor(left: Any, right: Any) -> bool: + return bool(left) != bool(right) diff --git a/fastapi_jsonapi/utils/metadata_instance_search.py b/fastapi_jsonapi/utils/metadata_instance_search.py new file mode 100644 index 00000000..a598d40d --- /dev/null +++ b/fastapi_jsonapi/utils/metadata_instance_search.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Generic, + TypeVar, +) + +if TYPE_CHECKING: + # noinspection PyProtectedMember + from collections.abc import Generator + + from pydantic.fields import FieldInfo + +SearchType = TypeVar("SearchType") + + +class MetadataInstanceSearch(Generic[SearchType]): + def __init__(self, search_type: type[SearchType]): + self.search_type = search_type + + def iterate(self, field: FieldInfo) -> Generator[SearchType, None, None]: + for elem in field.metadata: + if isinstance(elem, self.search_type): + yield elem + + return None + + def first(self, field: FieldInfo) -> SearchType | None: + return next(self.iterate(field), None) diff --git a/fastapi_jsonapi/utils/none_checker.py b/fastapi_jsonapi/utils/none_checker.py new file mode 100644 index 00000000..bda23a57 --- /dev/null +++ b/fastapi_jsonapi/utils/none_checker.py @@ -0,0 +1,24 @@ +from typing import ( + TYPE_CHECKING, + get_args, +) + +# noinspection PyProtectedMember +from pydantic._internal._typing_extra import is_none_type + +if TYPE_CHECKING: + # noinspection PyProtectedMember + from pydantic.fields import FieldInfo + + +def check_can_be_none(fields: list["FieldInfo"]) -> 
bool: + """ + Return True if None is possible value for target field + """ + for field in fields: + if args := get_args(field.annotation): + for arg in args: + # None is probably only on the top level + if is_none_type(arg): + return True + return False diff --git a/fastapi_jsonapi/utils/sqla.py b/fastapi_jsonapi/utils/sqla.py index 6e2ea85c..80af2eca 100644 --- a/fastapi_jsonapi/utils/sqla.py +++ b/fastapi_jsonapi/utils/sqla.py @@ -1,9 +1,7 @@ -from typing import Type - from fastapi_jsonapi.data_typing import TypeModel -def get_related_model_cls(cls: Type[TypeModel], relation_name: str) -> Type[TypeModel]: +def get_related_model_cls(cls: type[TypeModel], relation_name: str) -> type[TypeModel]: """ Get related model from SQLAlchemy model diff --git a/fastapi_jsonapi/validation_utils.py b/fastapi_jsonapi/validation_utils.py index 2a8dcb98..e34beef0 100644 --- a/fastapi_jsonapi/validation_utils.py +++ b/fastapi_jsonapi/validation_utils.py @@ -1,50 +1,22 @@ -from copy import deepcopy -from typing import TYPE_CHECKING, Callable, Dict, Optional, Set, Type +from __future__ import annotations -from pydantic import field_validator, model_validator +from typing import TYPE_CHECKING -from fastapi_jsonapi.schema_base import BaseModel +from pydantic import field_validator, model_validator if TYPE_CHECKING: - from pydantic._internal._decorators import Decorator, DecoratorInfos - + from pydantic import BaseModel as PydanticBaseModel + from pydantic._internal._decorators import DecoratorInfos + from pydantic.functional_validators import _V2Validator -def extract_root_validators(model: Type[BaseModel]) -> Dict[str, Callable]: - pre_root_validators = getattr(model, "__pre_root_validators__", []) - post_root_validators = getattr(model, "__post_root_validators__", []) - result_validators = {} - for validator_func in pre_root_validators: - result_validators[validator_func.__name__] = model_validator(mode="before") - - for validator_func in post_root_validators: - 
result_validators[validator_func.__name__] = model_validator( - mode="before", - ) - - return result_validators - - -def _deduplicate_field_validators(validators: "DecoratorInfos") -> Dict: - result_validators = {} - field_validators = validators.field_validators - model_validators = validators.model_validators - for category_validators in [field_validators, model_validators]: - for validator_name, field_validator_ in category_validators.items(): - func_name = field_validator_.func.__name__ - - if func_name not in result_validators: - result_validators[func_name] = field_validator_ - - return result_validators - - -def extract_field_validators( - model: Type[BaseModel], - include_for_field_names: Optional[Set[str]] = None, - exclude_for_field_names: Optional[Set[str]] = None, -): - validators = _deduplicate_field_validators(deepcopy(model.__pydantic_decorators__)) +# TODO: handle model validators? (info.model_validator) +def extract_validators( + model: type[PydanticBaseModel], + include_for_field_names: set[str] | None = None, + exclude_for_field_names: set[str] | None = None, +) -> dict[str, _V2Validator]: + validators: DecoratorInfos = model.__pydantic_decorators__ exclude_for_field_names = exclude_for_field_names or set() if include_for_field_names and exclude_for_field_names: @@ -53,34 +25,24 @@ def extract_field_validators( ) result_validators = {} - for field_name, field_validators in validators.items(): - field_validators: Decorator - if field_name in exclude_for_field_names: - continue - if include_for_field_names and field_name not in include_for_field_names: - continue - validator_params = { - "mode": field_validators.info.mode, - "check_fields": field_validators.info.check_fields, - } - validator_name = f"{field_name}" - result_validators[validator_name] = field_validator( - field_name, - **validator_params, - )(field_validators.func) + # field validators + for name, validator in validators.field_validators.items(): + for field_name in 
validator.info.fields: + # exclude + if field_name in exclude_for_field_names: + continue - return result_validators + # or include + if include_for_field_names and field_name not in include_for_field_names: + continue + validator_config = field_validator(field_name, mode=validator.info.mode) + result_validators[name] = validator_config(validator.func) -def extract_validators( - model: Type[BaseModel], - exclude_for_field_names: Optional[Set[str]] = None, -) -> Dict[str, Callable]: - return { - **extract_field_validators( - model=model, - exclude_for_field_names=exclude_for_field_names, - ), - **extract_root_validators(model), - } + # model validators + for name, validator in validators.model_validators.items(): + validator_config = model_validator(mode=validator.info.mode) + result_validators[name] = validator_config(validator.func) + + return result_validators diff --git a/fastapi_jsonapi/views/detail_view.py b/fastapi_jsonapi/views/detail_view.py index 712f2c17..e13fd3bc 100644 --- a/fastapi_jsonapi/views/detail_view.py +++ b/fastapi_jsonapi/views/detail_view.py @@ -1,22 +1,23 @@ +from __future__ import annotations + import logging from typing import ( TYPE_CHECKING, Any, - Dict, TypeVar, - Union, ) from fastapi_jsonapi import BadRequest -from fastapi_jsonapi.schema import ( - BaseJSONAPIItemInSchema, - JSONAPIResultDetailSchema, -) from fastapi_jsonapi.views.utils import handle_jsonapi_fields from fastapi_jsonapi.views.view_base import ViewBase if TYPE_CHECKING: from fastapi_jsonapi.data_layers.base import BaseDataLayer + from fastapi_jsonapi.data_typing import TypeSchema + from fastapi_jsonapi.schema import ( + BaseJSONAPIItemInSchema, + JSONAPIResultDetailSchema, + ) logger = logging.getLogger(__name__) @@ -27,16 +28,16 @@ class DetailViewBase(ViewBase): async def get_data_layer( self, - extra_view_deps: Dict[str, Any], - ) -> "BaseDataLayer": + extra_view_deps: dict[str, Any], + ) -> BaseDataLayer: return await self.get_data_layer_for_detail(extra_view_deps) 
async def handle_get_resource_detail( self, - object_id: Union[int, str], + object_id: int | str, **extra_view_deps, - ) -> Union[JSONAPIResultDetailSchema, Dict]: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) + ) -> JSONAPIResultDetailSchema | dict: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) view_kwargs = {dl.url_id_field: object_id} db_object = await dl.get_object(view_kwargs=view_kwargs, qs=self.query_params) @@ -49,17 +50,17 @@ async def handle_update_resource( obj_id: str, data_update: BaseJSONAPIItemInSchema, **extra_view_deps, - ) -> Union[JSONAPIResultDetailSchema, Dict]: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) + ) -> JSONAPIResultDetailSchema | dict: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) response = await self.process_update_object(dl=dl, obj_id=obj_id, data_update=data_update) return handle_jsonapi_fields(response, self.query_params, self.jsonapi) async def process_update_object( self, - dl: "BaseDataLayer", + dl: BaseDataLayer, obj_id: str, data_update: BaseJSONAPIItemInSchema, - ): + ) -> TypeSchema: if obj_id != data_update.id: raise BadRequest( detail="obj_id and data.id should be same", @@ -77,12 +78,12 @@ async def handle_delete_resource( obj_id: str, **extra_view_deps, ) -> None: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) await self.process_delete_object(dl=dl, obj_id=obj_id) async def process_delete_object( self, - dl: "BaseDataLayer", + dl: BaseDataLayer, obj_id: str, ) -> None: view_kwargs = {dl.url_id_field: obj_id} diff --git a/fastapi_jsonapi/views/list_view.py b/fastapi_jsonapi/views/list_view.py index e6fc59a1..290baf27 100644 --- a/fastapi_jsonapi/views/list_view.py +++ b/fastapi_jsonapi/views/list_view.py @@ -1,16 +1,18 @@ +from __future__ import annotations + import logging -from typing import TYPE_CHECKING, Any, Dict, Union +from typing import 
TYPE_CHECKING, Any -from fastapi_jsonapi.schema import ( - BaseJSONAPIItemInSchema, - JSONAPIResultDetailSchema, - JSONAPIResultListSchema, -) from fastapi_jsonapi.views.utils import handle_jsonapi_fields from fastapi_jsonapi.views.view_base import ViewBase if TYPE_CHECKING: from fastapi_jsonapi.data_layers.base import BaseDataLayer + from fastapi_jsonapi.schema import ( + BaseJSONAPIItemInSchema, + JSONAPIResultDetailSchema, + JSONAPIResultListSchema, + ) logger = logging.getLogger(__name__) @@ -21,22 +23,19 @@ def _calculate_total_pages(self, db_items_count: int) -> int: if not (pagination_size := self.query_params.pagination.size): return total_pages - total_pages = db_items_count // pagination_size + ( - # one more page if not a multiple of size - (db_items_count % pagination_size) - and 1 - ) - - return total_pages + page, remainder = divmod(db_items_count, pagination_size) + # add one more page if is not multiple of size + extra_page = remainder and 1 + return page + extra_page async def get_data_layer( self, - extra_view_deps: Dict[str, Any], - ) -> "BaseDataLayer": + extra_view_deps: dict[str, Any], + ) -> BaseDataLayer: return await self.get_data_layer_for_list(extra_view_deps) - async def handle_get_resource_list(self, **extra_view_deps) -> Union[JSONAPIResultListSchema, Dict]: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) + async def handle_get_resource_list(self, **extra_view_deps) -> JSONAPIResultListSchema | dict: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) query_params = self.query_params count, items_from_db = await dl.get_collection(qs=query_params) total_pages = self._calculate_total_pages(count) @@ -48,12 +47,16 @@ async def handle_post_resource_list( self, data_create: BaseJSONAPIItemInSchema, **extra_view_deps, - ) -> Union[JSONAPIResultDetailSchema, Dict]: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) + ) -> JSONAPIResultDetailSchema: + dl: BaseDataLayer = await 
self.get_data_layer(extra_view_deps) response = await self.process_create_object(dl=dl, data_create=data_create) return handle_jsonapi_fields(response, self.query_params, self.jsonapi) - async def process_create_object(self, dl: "BaseDataLayer", data_create: BaseJSONAPIItemInSchema): + async def process_create_object( + self, + dl: BaseDataLayer, + data_create: BaseJSONAPIItemInSchema, + ) -> JSONAPIResultDetailSchema: created_object = await dl.create_object(data_create=data_create, view_kwargs={}) created_object_id = dl.get_object_id(created_object) @@ -64,7 +67,7 @@ async def process_create_object(self, dl: "BaseDataLayer", data_create: BaseJSON return self._build_detail_response(db_object) async def handle_delete_resource_list(self, **extra_view_deps) -> JSONAPIResultListSchema: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) query_params = self.query_params count, items_from_db = await dl.get_collection(qs=query_params) total_pages = self._calculate_total_pages(count) diff --git a/fastapi_jsonapi/views/utils.py b/fastapi_jsonapi/views/utils.py index 00346fac..6a1177f0 100644 --- a/fastapi_jsonapi/views/utils.py +++ b/fastapi_jsonapi/views/utils.py @@ -1,25 +1,19 @@ from __future__ import annotations from collections import defaultdict +from collections.abc import Coroutine, Iterable from enum import Enum from functools import cache from typing import ( TYPE_CHECKING, Any, Callable, - Coroutine, - Dict, - Iterable, - List, - Optional, - Set, - Type, Union, ) from pydantic import BaseModel, ConfigDict -from pydantic_core.core_schema import ModelField +from fastapi_jsonapi.common import get_relationship_info_from_field_metadata from fastapi_jsonapi.data_typing import TypeSchema from fastapi_jsonapi.schema import JSONAPIObjectSchema from fastapi_jsonapi.schema_builder import ( @@ -45,21 +39,21 @@ class HTTPMethod(Enum): @staticmethod @cache - def names() -> Set[str]: + def names() 
-> set[str]: return {item.name for item in HTTPMethod} class HTTPMethodConfig(BaseModel): - dependencies: Optional[Type[BaseModel]] = None - prepare_data_layer_kwargs: Optional[Callable] = None + dependencies: type[BaseModel] | None = None + prepare_data_layer_kwargs: Callable[[...], ...] | None = None model_config = ConfigDict(arbitrary_types_allowed=True) @property - def handler(self) -> Optional[Union[Callable, Coroutine]]: + def handler(self) -> Callable | Coroutine | None: return self.prepare_data_layer_kwargs -def _get_includes_indexes_by_type(included: List[JSONAPIObjectSchema]) -> Dict[str, List[int]]: +def get_includes_indexes_by_type(included: list[JSONAPIObjectSchema]) -> dict[str, list[int]]: result = defaultdict(list) for idx, item in enumerate(included): @@ -68,19 +62,12 @@ def _get_includes_indexes_by_type(included: List[JSONAPIObjectSchema]) -> Dict[s return result -# TODO: move to schema builder? -def _is_relationship_field(field: ModelField) -> bool: - return "relationship" in field.json_schema_extra - - -def _get_schema_field_names(schema: Type[TypeSchema]) -> Set[str]: - """ - Returns all attribute names except relationships - """ +def get_schema_field_names(schema: type[TypeSchema]) -> set[str]: + """Returns all attribute names except relationships""" result = set() - for field_name, field in schema.__fields__.items(): - if _is_relationship_field(field): + for field_name, field in schema.model_fields.items(): + if get_relationship_info_from_field_metadata(field): continue result.add(field_name) @@ -89,31 +76,31 @@ def _get_schema_field_names(schema: Type[TypeSchema]) -> Set[str]: def _get_exclude_fields( - schema: Type[TypeSchema], + schema: type[TypeSchema], include_fields: Iterable[str], -) -> Set[str]: - schema_fields = _get_schema_field_names(schema) +) -> set[str]: + schema_fields = get_schema_field_names(schema) if IGNORE_ALL_FIELDS_LITERAL in include_fields: return schema_fields - return 
set(_get_schema_field_names(schema)).difference(include_fields) + return set(get_schema_field_names(schema)).difference(include_fields) def _calculate_exclude_fields( response: JSONAPIResponse, query_params: QueryStringManager, jsonapi: RoutersJSONAPI, -) -> Dict: - included = "included" in response.__fields__ and response.included or [] +) -> dict: + included = "included" in response.model_fields and response.included or [] is_list_response = isinstance(response, JSONAPIResultListSchema) - exclude_params: Dict[str, Any] = {} + exclude_params: dict[str, Any] = {} - includes_indexes_by_type = _get_includes_indexes_by_type(included) + includes_indexes_by_type = get_includes_indexes_by_type(included) for resource_type, field_names in query_params.fields.items(): - schema = jsonapi.all_jsonapi_routers[resource_type]._schema + schema = jsonapi.all_jsonapi_routers[resource_type].schema exclude_fields = _get_exclude_fields(schema, include_fields=field_names) attributes_exclude = {"attributes": exclude_fields} @@ -143,13 +130,13 @@ def handle_jsonapi_fields( response: JSONAPIResponse, query_params: QueryStringManager, jsonapi: RoutersJSONAPI, -) -> Union[JSONAPIResponse, Dict]: +) -> JSONAPIResponse | dict: if not query_params.fields: return response exclude_params = _calculate_exclude_fields(response, query_params, jsonapi) if exclude_params: - return response.dict(exclude=exclude_params, by_alias=True) + return response.model_dump(exclude=exclude_params, by_alias=True) return response diff --git a/fastapi_jsonapi/views/view_base.py b/fastapi_jsonapi/views/view_base.py index ef3b95bd..40791c40 100644 --- a/fastapi_jsonapi/views/view_base.py +++ b/fastapi_jsonapi/views/view_base.py @@ -1,57 +1,62 @@ +from __future__ import annotations + import inspect import logging from collections import defaultdict +from collections.abc import Iterable from contextvars import ContextVar from functools import partial from typing import ( + TYPE_CHECKING, Any, Callable, ClassVar, - Dict, 
- Iterable, - List, - Optional, - Tuple, - Type, - Union, ) -from fastapi import Request -from pydantic import BaseModel as PydanticBaseModel -from pydantic.fields import FieldInfo from starlette.concurrency import run_in_threadpool from fastapi_jsonapi import QueryStringManager, RoutersJSONAPI +from fastapi_jsonapi.common import get_relationship_info_from_field_metadata from fastapi_jsonapi.data_layers.base import BaseDataLayer -from fastapi_jsonapi.data_typing import ( - TypeModel, - TypeSchema, -) from fastapi_jsonapi.schema import ( JSONAPIObjectSchema, JSONAPIResultListMetaSchema, JSONAPIResultListSchema, get_related_schema, + get_schema_from_field_annotation, ) -from fastapi_jsonapi.schema_base import BaseModel, RelationshipInfo -from fastapi_jsonapi.schema_builder import JSONAPIObjectSchemas +from fastapi_jsonapi.schema_base import BaseModel from fastapi_jsonapi.splitter import SPLIT_REL from fastapi_jsonapi.views.utils import ( HTTPMethod, HTTPMethodConfig, ) +if TYPE_CHECKING: + from fastapi import Request + from pydantic import BaseModel as PydanticBaseModel + from pydantic.fields import FieldInfo + + from fastapi_jsonapi.data_typing import ( + TypeModel, + TypeSchema, + ) + from fastapi_jsonapi.schema_builder import ( + JSONAPIObjectSchemas, + ) + from fastapi_jsonapi.types_metadata import RelationshipInfo + logger = logging.getLogger(__name__) previous_resource_type_ctx_var: ContextVar[str] = ContextVar("previous_resource_type_ctx_var") related_field_name_ctx_var: ContextVar[str] = ContextVar("related_field_name_ctx_var") -relationships_schema_ctx_var: ContextVar[Type[BaseModel]] = ContextVar("relationships_schema_ctx_var") -object_schema_ctx_var: ContextVar[Type[JSONAPIObjectSchema]] = ContextVar("object_schema_ctx_var") -included_object_schema_ctx_var: ContextVar[Type[TypeSchema]] = ContextVar("included_object_schema_ctx_var") +relationships_schema_ctx_var: ContextVar[type[BaseModel]] = ContextVar("relationships_schema_ctx_var") 
+object_schema_ctx_var: ContextVar[type[JSONAPIObjectSchema]] = ContextVar("object_schema_ctx_var") +included_object_schema_ctx_var: ContextVar[type[TypeSchema]] = ContextVar("included_object_schema_ctx_var") relationship_info_ctx_var: ContextVar[RelationshipInfo] = ContextVar("relationship_info_ctx_var") # TODO: just change state on `self`!! (refactor) -included_objects_ctx_var: ContextVar[Dict[Tuple[str, str], TypeSchema]] = ContextVar("included_objects_ctx_var") +included_objects_ctx_var: ContextVar[dict[tuple[str, str], TypeSchema]] = ContextVar("included_objects_ctx_var") class ViewBase: @@ -60,7 +65,7 @@ class ViewBase: """ data_layer_cls = BaseDataLayer - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = {} + method_dependencies: ClassVar[dict[HTTPMethod, HTTPMethodConfig]] = {} def __init__(self, *, request: Request, jsonapi: RoutersJSONAPI, **options): self.request: Request = request @@ -68,7 +73,7 @@ def __init__(self, *, request: Request, jsonapi: RoutersJSONAPI, **options): self.options: dict = options self.query_params: QueryStringManager = QueryStringManager(request=request) - def _get_data_layer(self, schema: Type[BaseModel], **dl_kwargs): + def _get_data_layer(self, schema: type[BaseModel], **dl_kwargs): return self.data_layer_cls( request=self.request, schema=schema, @@ -79,13 +84,13 @@ def _get_data_layer(self, schema: Type[BaseModel], **dl_kwargs): async def get_data_layer( self, - extra_view_deps: Dict[str, Any], + extra_view_deps: dict[str, Any], ) -> BaseDataLayer: raise NotImplementedError async def get_data_layer_for_detail( self, - extra_view_deps: Dict[str, Any], + extra_view_deps: dict[str, Any], ) -> BaseDataLayer: """ Prepares data layer for detail view @@ -101,7 +106,7 @@ async def get_data_layer_for_detail( async def get_data_layer_for_list( self, - extra_view_deps: Dict[str, Any], + extra_view_deps: dict[str, Any], ) -> BaseDataLayer: """ Prepares data layer for list view @@ -118,7 +123,7 @@ async def 
get_data_layer_for_list( async def _run_handler( self, handler: Callable, - dto: Optional[BaseModel] = None, + dto: BaseModel | None = None, ): handler = partial(handler, self, dto) if dto is not None else partial(handler, self) @@ -130,26 +135,22 @@ async def _run_handler( async def _handle_config( self, method_config: HTTPMethodConfig, - extra_view_deps: Dict[str, Any], - ) -> Dict[str, Any]: + extra_view_deps: dict[str, Any], + ) -> dict[str, Any]: if method_config.handler is None: return {} if method_config.dependencies: - dto_class: Type[PydanticBaseModel] = method_config.dependencies + dto_class: type[PydanticBaseModel] = method_config.dependencies dto = dto_class(**extra_view_deps) - dl_kwargs = await self._run_handler(method_config.handler, dto) - - return dl_kwargs + return await self._run_handler(method_config.handler, dto) - dl_kwargs = await self._run_handler(method_config.handler) - - return dl_kwargs + return await self._run_handler(method_config.handler) async def handle_endpoint_dependencies( self, - extra_view_deps: Dict[str, Any], - ) -> Dict: + extra_view_deps: dict[str, Any], + ) -> dict: """ :return dict: this is **kwargs for DataLayer.__init___ """ @@ -165,7 +166,7 @@ async def handle_endpoint_dependencies( return dl_kwargs - def _build_response(self, items_from_db: List[TypeModel], item_schema: Type[BaseModel]): + def _build_response(self, items_from_db: list[TypeModel], item_schema: type[BaseModel]): return self.process_includes_for_db_items( includes=self.query_params.include, # as list to reuse helper @@ -173,8 +174,11 @@ def _build_response(self, items_from_db: List[TypeModel], item_schema: Type[Base item_schema=item_schema, ) - def _build_detail_response(self, db_item: TypeModel): - result_objects, object_schemas, extras = self._build_response([db_item], self.jsonapi.schema_detail) + def _build_detail_response(self, db_item: TypeModel) -> TypeSchema: + result_objects, object_schemas, extras = self._build_response( + 
items_from_db=[db_item], + item_schema=self.jsonapi.schema_detail, + ) # is it ok to do through list? result_object = result_objects[0] @@ -184,11 +188,16 @@ def _build_detail_response(self, db_item: TypeModel): includes_schemas=object_schemas.included_schemas_list, ) - return detail_jsonapi_schema(data=result_object, **extras) + # TODO: handle + # Expected `Union[...]` but got `...` - serialized value may not be as expected + return detail_jsonapi_schema( + data=result_object, + **extras, + ) def _build_list_response( self, - items_from_db: List[TypeModel], + items_from_db: list[TypeModel], count: int, total_pages: int, ) -> JSONAPIResultListSchema: @@ -225,8 +234,8 @@ def get_db_item_id(cls, item_from_db: TypeModel): def prepare_related_object_data( cls, item_from_db: TypeModel, - ) -> Tuple[Dict[str, Union[str, int]], Optional[TypeSchema]]: - included_object_schema: Type[TypeSchema] = included_object_schema_ctx_var.get() + ) -> tuple[dict[str, str | int], TypeSchema | None]: + included_object_schema: type[TypeSchema] = included_object_schema_ctx_var.get() relationship_info: RelationshipInfo = relationship_info_ctx_var.get() item_id = cls.get_db_item_id(item_from_db) data_for_relationship = {"id": item_id} @@ -241,8 +250,8 @@ def prepare_related_object_data( @classmethod def prepare_data_for_relationship( cls, - related_db_item: Union[List[TypeModel], TypeModel], - ) -> Tuple[Optional[Dict[str, Union[str, int]]], List[TypeSchema]]: + related_db_item: list[TypeModel] | TypeModel, + ) -> tuple[dict[str, str | int] | None, list[TypeSchema]]: included_objects = [] if related_db_item is None: return None, included_objects @@ -257,13 +266,13 @@ def prepare_data_for_relationship( @classmethod def update_related_object( cls, - relationship_data: Union[Dict[str, str], List[Dict[str, str]]], - cache_key: Tuple[str, str], + relationship_data: dict[str, str] | list[dict[str, str]], + cache_key: tuple[str, str], related_field_name: str, ): - relationships_schema: 
Type[BaseModel] = relationships_schema_ctx_var.get() - object_schema: Type[JSONAPIObjectSchema] = object_schema_ctx_var.get() - included_objects: Dict[Tuple[str, str], TypeSchema] = included_objects_ctx_var.get() + relationships_schema: type[BaseModel] = relationships_schema_ctx_var.get() + object_schema: type[JSONAPIObjectSchema] = object_schema_ctx_var.get() + included_objects: dict[tuple[str, str], TypeSchema] = included_objects_ctx_var.get() relationship_data_schema = get_related_schema(relationships_schema, related_field_name) parent_included_object = included_objects.get(cache_key) @@ -280,18 +289,21 @@ def update_related_object( ), }, ) - included_objects[cache_key] = object_schema.model_validate( - parent_included_object, - ).model_copy( + + included_objects[cache_key] = object_schema( + **parent_included_object.model_dump( + exclude={"relationships"} if getattr(parent_included_object, "relationships", None) is None else None, + ), + ).copy( update={"relationships": new_relationships}, ) @classmethod def update_known_included( cls, - new_included: List[TypeSchema], + new_included: list[TypeSchema], ): - included_objects: Dict[Tuple[str, str], TypeSchema] = included_objects_ctx_var.get() + included_objects: dict[tuple[str, str], TypeSchema] = included_objects_ctx_var.get() for included in new_included: key = (included.id, included.type) @@ -343,7 +355,7 @@ def process_single_db_item_and_prepare_includes( @classmethod def process_db_items_and_prepare_includes( cls, - parent_db_items: List[TypeModel], + parent_db_items: list[TypeModel], ): next_current_db_item = [] @@ -357,12 +369,12 @@ def process_db_items_and_prepare_includes( def process_include_with_nested( self, include: str, - current_db_item: Union[List[TypeModel], TypeModel], + current_db_item: list[TypeModel] | TypeModel, item_as_schema: TypeSchema, - current_relation_schema: Type[TypeSchema], - included_objects: Dict[Tuple[str, str], TypeSchema], - requested_includes: Dict[str, Iterable[str]], - ) -> 
Tuple[Dict[str, TypeSchema], List[JSONAPIObjectSchema]]: + current_relation_schema: type[TypeSchema], + included_objects: dict[tuple[str, str], TypeSchema], + requested_includes: dict[str, Iterable[str]], + ) -> tuple[dict[str, TypeSchema], list[JSONAPIObjectSchema]]: root_item_key = (item_as_schema.id, item_as_schema.type) if root_item_key not in included_objects: @@ -380,10 +392,13 @@ def process_include_with_nested( schemas_include = object_schemas.can_be_included_schemas current_relation_field: FieldInfo = current_relation_schema.model_fields[related_field_name] - current_relation_schema: Type[TypeSchema] = current_relation_field.annotation + current_relation_schema: type[TypeSchema] = get_schema_from_field_annotation(current_relation_field) + # TODO: check and raise, get rid of assert! + assert current_relation_schema - relationship_info: RelationshipInfo = current_relation_field.description.extra["relationship"] - included_object_schema: Type[JSONAPIObjectSchema] = schemas_include[related_field_name] + # TODO: check for None and raise + relationship_info: RelationshipInfo = get_relationship_info_from_field_metadata(current_relation_field) + included_object_schema: type[JSONAPIObjectSchema] = schemas_include[related_field_name] if not isinstance(current_db_item, Iterable): # xxx: less if/else @@ -408,7 +423,7 @@ def process_include_with_nested( return included_objects.pop(root_item_key), list(included_objects.values()) def prep_requested_includes(self, includes: Iterable[str]): - requested_includes: Dict[str, set[str]] = defaultdict(set) + requested_includes: dict[str, set[str]] = defaultdict(set) default: str = self.jsonapi.type_ for include in includes: prev = default @@ -420,9 +435,9 @@ def prep_requested_includes(self, includes: Iterable[str]): def process_db_object( self, - includes: List[str], + includes: list[str], item: TypeModel, - item_schema: Type[TypeSchema], + item_schema: type[TypeSchema], object_schemas: JSONAPIObjectSchemas, ): included_objects 
= [] @@ -432,7 +447,7 @@ def process_db_object( attributes=object_schemas.attributes_schema.model_validate(item), ) - cache_included_objects: Dict[Tuple[str, str], TypeSchema] = {} + cache_included_objects: dict[tuple[str, str], TypeSchema] = {} requested_includes = self.prep_requested_includes(includes) for include in includes: @@ -451,9 +466,9 @@ def process_db_object( def process_includes_for_db_items( self, - includes: List[str], - items_from_db: List[TypeModel], - item_schema: Type[TypeSchema], + includes: list[str], + items_from_db: list[TypeModel], + item_schema: type[TypeSchema], ): object_schemas = self.jsonapi.schema_builder.create_jsonapi_object_schemas( schema=item_schema, @@ -466,7 +481,7 @@ def process_includes_for_db_items( # form: # `(type, id): serialized_object` # helps to exclude duplicates - included_objects: Dict[Tuple[str, str], TypeSchema] = {} + included_objects: dict[tuple[str, str], TypeSchema] = {} for item in items_from_db: jsonapi_object, new_included = self.process_db_object( includes=includes, diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 7663ec75..00000000 --- a/poetry.lock +++ /dev/null @@ -1,2306 +0,0 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
- -[[package]] -name = "aiosqlite" -version = "0.17.0" -description = "asyncio bridge to the standard sqlite3 module" -optional = false -python-versions = ">=3.6" -files = [ - {file = "aiosqlite-0.17.0-py3-none-any.whl", hash = "sha256:6c49dc6d3405929b1d08eeccc72306d3677503cc5e5e43771efc1e00232e8231"}, - {file = "aiosqlite-0.17.0.tar.gz", hash = "sha256:f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51"}, -] - -[package.dependencies] -typing_extensions = ">=3.7.2" - -[[package]] -name = "alabaster" -version = "0.7.16" -description = "A light, configurable Sphinx theme" -optional = false -python-versions = ">=3.9" -files = [ - {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, - {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, -] - -[[package]] -name = "annotated-types" -version = "0.6.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, -] - -[[package]] -name = "anyio" -version = "4.3.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = 
"python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "asyncpg" -version = "0.28.0" -description = "An asyncio PostgreSQL driver" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "asyncpg-0.28.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a6d1b954d2b296292ddff4e0060f494bb4270d87fb3655dd23c5c6096d16d83"}, - {file = "asyncpg-0.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0740f836985fd2bd73dca42c50c6074d1d61376e134d7ad3ad7566c4f79f8184"}, - {file = "asyncpg-0.28.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e907cf620a819fab1737f2dd90c0f185e2a796f139ac7de6aa3212a8af96c050"}, - {file = "asyncpg-0.28.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b339984d55e8202e0c4b252e9573e26e5afa05617ed02252544f7b3e6de3e9"}, - {file = "asyncpg-0.28.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c402745185414e4c204a02daca3d22d732b37359db4d2e705172324e2d94e85"}, - {file = "asyncpg-0.28.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c88eef5e096296626e9688f00ab627231f709d0e7e3fb84bb4413dff81d996d7"}, - {file = "asyncpg-0.28.0-cp310-cp310-win32.whl", hash = "sha256:90a7bae882a9e65a9e448fdad3e090c2609bb4637d2a9c90bfdcebbfc334bf89"}, - {file = "asyncpg-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:76aacdcd5e2e9999e83c8fbcb748208b60925cc714a578925adcb446d709016c"}, - {file = "asyncpg-0.28.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a0e08fe2c9b3618459caaef35979d45f4e4f8d4f79490c9fa3367251366af207"}, - {file = "asyncpg-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:b24e521f6060ff5d35f761a623b0042c84b9c9b9fb82786aadca95a9cb4a893b"}, - {file = "asyncpg-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99417210461a41891c4ff301490a8713d1ca99b694fef05dabd7139f9d64bd6c"}, - {file = "asyncpg-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f029c5adf08c47b10bcdc857001bbef551ae51c57b3110964844a9d79ca0f267"}, - {file = "asyncpg-0.28.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ad1d6abf6c2f5152f46fff06b0e74f25800ce8ec6c80967f0bc789974de3c652"}, - {file = "asyncpg-0.28.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d7fa81ada2807bc50fea1dc741b26a4e99258825ba55913b0ddbf199a10d69d8"}, - {file = "asyncpg-0.28.0-cp311-cp311-win32.whl", hash = "sha256:f33c5685e97821533df3ada9384e7784bd1e7865d2b22f153f2e4bd4a083e102"}, - {file = "asyncpg-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:5e7337c98fb493079d686a4a6965e8bcb059b8e1b8ec42106322fc6c1c889bb0"}, - {file = "asyncpg-0.28.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1c56092465e718a9fdcc726cc3d9dcf3a692e4834031c9a9f871d92a75d20d48"}, - {file = "asyncpg-0.28.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4acd6830a7da0eb4426249d71353e8895b350daae2380cb26d11e0d4a01c5472"}, - {file = "asyncpg-0.28.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63861bb4a540fa033a56db3bb58b0c128c56fad5d24e6d0a8c37cb29b17c1c7d"}, - {file = "asyncpg-0.28.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a93a94ae777c70772073d0512f21c74ac82a8a49be3a1d982e3f259ab5f27307"}, - {file = "asyncpg-0.28.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d14681110e51a9bc9c065c4e7944e8139076a778e56d6f6a306a26e740ed86d2"}, - {file = "asyncpg-0.28.0-cp37-cp37m-win32.whl", hash = "sha256:8aec08e7310f9ab322925ae5c768532e1d78cfb6440f63c078b8392a38aa636a"}, - {file = "asyncpg-0.28.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:319f5fa1ab0432bc91fb39b3960b0d591e6b5c7844dafc92c79e3f1bff96abef"}, - {file = "asyncpg-0.28.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b337ededaabc91c26bf577bfcd19b5508d879c0ad009722be5bb0a9dd30b85a0"}, - {file = "asyncpg-0.28.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d32b680a9b16d2957a0a3cc6b7fa39068baba8e6b728f2e0a148a67644578f4"}, - {file = "asyncpg-0.28.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f62f04cdf38441a70f279505ef3b4eadf64479b17e707c950515846a2df197"}, - {file = "asyncpg-0.28.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f20cac332c2576c79c2e8e6464791c1f1628416d1115935a34ddd7121bfc6a4"}, - {file = "asyncpg-0.28.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:59f9712ce01e146ff71d95d561fb68bd2d588a35a187116ef05028675462d5ed"}, - {file = "asyncpg-0.28.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9e9f9ff1aa0eddcc3247a180ac9e9b51a62311e988809ac6152e8fb8097756"}, - {file = "asyncpg-0.28.0-cp38-cp38-win32.whl", hash = "sha256:9e721dccd3838fcff66da98709ed884df1e30a95f6ba19f595a3706b4bc757e3"}, - {file = "asyncpg-0.28.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ba7d06a0bea539e0487234511d4adf81dc8762249858ed2a580534e1720db00"}, - {file = "asyncpg-0.28.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d009b08602b8b18edef3a731f2ce6d3f57d8dac2a0a4140367e194eabd3de457"}, - {file = "asyncpg-0.28.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ec46a58d81446d580fb21b376ec6baecab7288ce5a578943e2fc7ab73bf7eb39"}, - {file = "asyncpg-0.28.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b48ceed606cce9e64fd5480a9b0b9a95cea2b798bb95129687abd8599c8b019"}, - {file = "asyncpg-0.28.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8858f713810f4fe67876728680f42e93b7e7d5c7b61cf2118ef9153ec16b9423"}, - {file = "asyncpg-0.28.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:5e18438a0730d1c0c1715016eacda6e9a505fc5aa931b37c97d928d44941b4bf"}, - {file = "asyncpg-0.28.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e9c433f6fcdd61c21a715ee9128a3ca48be8ac16fa07be69262f016bb0f4dbd2"}, - {file = "asyncpg-0.28.0-cp39-cp39-win32.whl", hash = "sha256:41e97248d9076bc8e4849da9e33e051be7ba37cd507cbd51dfe4b2d99c70e3dc"}, - {file = "asyncpg-0.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ed77f00c6aacfe9d79e9eff9e21729ce92a4b38e80ea99a58ed382f42ebd55b"}, - {file = "asyncpg-0.28.0.tar.gz", hash = "sha256:7252cdc3acb2f52feaa3664280d3bcd78a46bd6c10bfd681acfffefa1120e278"}, -] - -[package.extras] -docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["flake8 (>=5.0,<6.0)", "uvloop (>=0.15.3)"] - -[[package]] -name = "babel" -version = "2.14.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, -] - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "backports-tarfile" -version = "1.1.1" -description = "Backport of CPython tarfile module" -optional = false -python-versions = ">=3.8" -files = [ - {file = "backports.tarfile-1.1.1-py3-none-any.whl", hash = "sha256:73e0179647803d3726d82e76089d01d8549ceca9bace469953fcb4d97cf2d417"}, - {file = "backports_tarfile-1.1.1.tar.gz", hash = "sha256:9c2ef9696cb73374f7164e17fc761389393ca76777036f5aad42e8b93fcd8009"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] - -[[package]] -name = "black" -version = 
"23.12.1" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.8" -files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, - {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "certifi" -version = "2024.2.2" -description = "Python package for providing Mozilla's CA Bundle." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "cffi" -version = "1.16.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = 
"sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = 
"cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "coverage" -version = "7.5.0" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:432949a32c3e3f820af808db1833d6d1631664d53dd3ce487aa25d574e18ad1c"}, - {file = "coverage-7.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2bd7065249703cbeb6d4ce679c734bef0ee69baa7bff9724361ada04a15b7e3b"}, - {file = "coverage-7.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbfe6389c5522b99768a93d89aca52ef92310a96b99782973b9d11e80511f932"}, - {file = "coverage-7.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39793731182c4be939b4be0cdecde074b833f6171313cf53481f869937129ed3"}, - {file = "coverage-7.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85a5dbe1ba1bf38d6c63b6d2c42132d45cbee6d9f0c51b52c59aa4afba057517"}, - {file = "coverage-7.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:357754dcdfd811462a725e7501a9b4556388e8ecf66e79df6f4b988fa3d0b39a"}, - {file = "coverage-7.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a81eb64feded34f40c8986869a2f764f0fe2db58c0530d3a4afbcde50f314880"}, - {file = "coverage-7.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51431d0abbed3a868e967f8257c5faf283d41ec882f58413cf295a389bb22e58"}, - {file = "coverage-7.5.0-cp310-cp310-win32.whl", hash = "sha256:f609ebcb0242d84b7adeee2b06c11a2ddaec5464d21888b2c8255f5fd6a98ae4"}, - {file = 
"coverage-7.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:6782cd6216fab5a83216cc39f13ebe30adfac2fa72688c5a4d8d180cd52e8f6a"}, - {file = "coverage-7.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e768d870801f68c74c2b669fc909839660180c366501d4cc4b87efd6b0eee375"}, - {file = "coverage-7.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:84921b10aeb2dd453247fd10de22907984eaf80901b578a5cf0bb1e279a587cb"}, - {file = "coverage-7.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:710c62b6e35a9a766b99b15cdc56d5aeda0914edae8bb467e9c355f75d14ee95"}, - {file = "coverage-7.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c379cdd3efc0658e652a14112d51a7668f6bfca7445c5a10dee7eabecabba19d"}, - {file = "coverage-7.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fea9d3ca80bcf17edb2c08a4704259dadac196fe5e9274067e7a20511fad1743"}, - {file = "coverage-7.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:41327143c5b1d715f5f98a397608f90ab9ebba606ae4e6f3389c2145410c52b1"}, - {file = "coverage-7.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:565b2e82d0968c977e0b0f7cbf25fd06d78d4856289abc79694c8edcce6eb2de"}, - {file = "coverage-7.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cf3539007202ebfe03923128fedfdd245db5860a36810136ad95a564a2fdffff"}, - {file = "coverage-7.5.0-cp311-cp311-win32.whl", hash = "sha256:bf0b4b8d9caa8d64df838e0f8dcf68fb570c5733b726d1494b87f3da85db3a2d"}, - {file = "coverage-7.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c6384cc90e37cfb60435bbbe0488444e54b98700f727f16f64d8bfda0b84656"}, - {file = "coverage-7.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fed7a72d54bd52f4aeb6c6e951f363903bd7d70bc1cad64dd1f087980d309ab9"}, - {file = "coverage-7.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:cbe6581fcff7c8e262eb574244f81f5faaea539e712a058e6707a9d272fe5b64"}, - {file = "coverage-7.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad97ec0da94b378e593ef532b980c15e377df9b9608c7c6da3506953182398af"}, - {file = "coverage-7.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd4bacd62aa2f1a1627352fe68885d6ee694bdaebb16038b6e680f2924a9b2cc"}, - {file = "coverage-7.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf032b6c105881f9d77fa17d9eebe0ad1f9bfb2ad25777811f97c5362aa07f2"}, - {file = "coverage-7.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ba01d9ba112b55bfa4b24808ec431197bb34f09f66f7cb4fd0258ff9d3711b1"}, - {file = "coverage-7.5.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f0bfe42523893c188e9616d853c47685e1c575fe25f737adf473d0405dcfa7eb"}, - {file = "coverage-7.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a9a7ef30a1b02547c1b23fa9a5564f03c9982fc71eb2ecb7f98c96d7a0db5cf2"}, - {file = "coverage-7.5.0-cp312-cp312-win32.whl", hash = "sha256:3c2b77f295edb9fcdb6a250f83e6481c679335ca7e6e4a955e4290350f2d22a4"}, - {file = "coverage-7.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:427e1e627b0963ac02d7c8730ca6d935df10280d230508c0ba059505e9233475"}, - {file = "coverage-7.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9dd88fce54abbdbf4c42fb1fea0e498973d07816f24c0e27a1ecaf91883ce69e"}, - {file = "coverage-7.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a898c11dca8f8c97b467138004a30133974aacd572818c383596f8d5b2eb04a9"}, - {file = "coverage-7.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07dfdd492d645eea1bd70fb1d6febdcf47db178b0d99161d8e4eed18e7f62fe7"}, - {file = "coverage-7.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d3d117890b6eee85887b1eed41eefe2e598ad6e40523d9f94c4c4b213258e4a4"}, - {file = "coverage-7.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6afd2e84e7da40fe23ca588379f815fb6dbbb1b757c883935ed11647205111cb"}, - {file = "coverage-7.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a9960dd1891b2ddf13a7fe45339cd59ecee3abb6b8326d8b932d0c5da208104f"}, - {file = "coverage-7.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ced268e82af993d7801a9db2dbc1d2322e786c5dc76295d8e89473d46c6b84d4"}, - {file = "coverage-7.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e7c211f25777746d468d76f11719e64acb40eed410d81c26cefac641975beb88"}, - {file = "coverage-7.5.0-cp38-cp38-win32.whl", hash = "sha256:262fffc1f6c1a26125d5d573e1ec379285a3723363f3bd9c83923c9593a2ac25"}, - {file = "coverage-7.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:eed462b4541c540d63ab57b3fc69e7d8c84d5957668854ee4e408b50e92ce26a"}, - {file = "coverage-7.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0194d654e360b3e6cc9b774e83235bae6b9b2cac3be09040880bb0e8a88f4a1"}, - {file = "coverage-7.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33c020d3322662e74bc507fb11488773a96894aa82a622c35a5a28673c0c26f5"}, - {file = "coverage-7.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbdf2cae14a06827bec50bd58e49249452d211d9caddd8bd80e35b53cb04631"}, - {file = "coverage-7.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3235d7c781232e525b0761730e052388a01548bd7f67d0067a253887c6e8df46"}, - {file = "coverage-7.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2de4e546f0ec4b2787d625e0b16b78e99c3e21bc1722b4977c0dddf11ca84e"}, - {file = "coverage-7.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0e206259b73af35c4ec1319fd04003776e11e859936658cb6ceffdeba0f5be"}, - 
{file = "coverage-7.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2055c4fb9a6ff624253d432aa471a37202cd8f458c033d6d989be4499aed037b"}, - {file = "coverage-7.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:075299460948cd12722a970c7eae43d25d37989da682997687b34ae6b87c0ef0"}, - {file = "coverage-7.5.0-cp39-cp39-win32.whl", hash = "sha256:280132aada3bc2f0fac939a5771db4fbb84f245cb35b94fae4994d4c1f80dae7"}, - {file = "coverage-7.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:c58536f6892559e030e6924896a44098bc1290663ea12532c78cef71d0df8493"}, - {file = "coverage-7.5.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:2b57780b51084d5223eee7b59f0d4911c31c16ee5aa12737c7a02455829ff067"}, - {file = "coverage-7.5.0.tar.gz", hash = "sha256:cf62d17310f34084c59c01e027259076479128d11e4661bb6c9acb38c5e19bb8"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "cryptography" -version = "42.0.5" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, - {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, - {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, - {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, - {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, - {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, - {file = 
"cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, - {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "distlib" -version = "0.3.8" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, -] - -[[package]] 
-name = "docutils" -version = "0.21.2" -description = "Docutils -- Python Documentation Utilities" -optional = false -python-versions = ">=3.9" -files = [ - {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, - {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, -] - -[[package]] -name = "editables" -version = "0.5" -description = "Editable installations" -optional = false -python-versions = ">=3.7" -files = [ - {file = "editables-0.5-py3-none-any.whl", hash = "sha256:61e5ffa82629e0d8bfe09bc44a07db3c1ab8ed1ce78a6980732870f19b5e7d4c"}, - {file = "editables-0.5.tar.gz", hash = "sha256:309627d9b5c4adc0e668d8c6fa7bac1ba7c8c5d415c2d27f60f081f8e80d1de2"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.1" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "faker" -version = "18.13.0" -description = "Faker is a Python package that generates fake data for you." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "Faker-18.13.0-py3-none-any.whl", hash = "sha256:801d1a2d71f1fc54d332de2ab19de7452454309937233ea2f7485402882d67b3"}, - {file = "Faker-18.13.0.tar.gz", hash = "sha256:84bcf92bb725dd7341336eea4685df9a364f16f2470c4d29c1d7e6c5fd5a457d"}, -] - -[package.dependencies] -python-dateutil = ">=2.4" - -[[package]] -name = "fastapi" -version = "0.110.2" -description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastapi-0.110.2-py3-none-any.whl", hash = "sha256:239403f2c0a3dda07a9420f95157a7f014ddb2b770acdbc984f9bdf3ead7afdb"}, - {file = "fastapi-0.110.2.tar.gz", hash = "sha256:b53d673652da3b65e8cd787ad214ec0fe303cad00d2b529b86ce7db13f17518d"}, -] - -[package.dependencies] -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.37.2,<0.38.0" -typing-extensions = ">=4.8.0" - -[package.extras] -all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] - -[[package]] -name = "filelock" -version = "3.13.4" -description = "A platform independent file lock." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, - {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = true -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = 
"greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = 
"greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = 
"sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "hatch" -version = "1.9.4" -description = "Modern, extensible Python project management" -optional = false -python-versions = ">=3.8" -files = [ - {file = "hatch-1.9.4-py3-none-any.whl", hash = "sha256:461eb86b4b46249e38a9a621c7239e61285fd8e14b5a1b5a727c394893a25300"}, - {file = "hatch-1.9.4.tar.gz", hash = "sha256:9bb7d1c4a7a51cc1f9e16394875c940b45fa84b698f0291529316b27d74e7f32"}, -] - -[package.dependencies] -click = ">=8.0.6" -hatchling = "<1.22" -httpx = ">=0.22.0" -hyperlink = ">=21.0.0" -keyring = ">=23.5.0" -packaging = ">=21.3" -pexpect = ">=4.8,<5.0" -platformdirs = ">=2.5.0" -rich = ">=11.2.0" -shellingham = ">=1.4.0" -tomli-w = ">=1.0" -tomlkit = ">=0.11.1" -userpath = ">=1.7,<2.0" -virtualenv = ">=20.16.2" -zstandard = "<1" - -[[package]] -name = "hatchling" -version = "1.21.1" -description = "Modern, extensible Python build backend" -optional = false -python-versions = ">=3.8" -files = [ - {file = "hatchling-1.21.1-py3-none-any.whl", hash = "sha256:21e8c13f8458b219a91cb84e5b61c15bf786695d1c4fabc29e91e78f94bfe892"}, - {file = "hatchling-1.21.1.tar.gz", hash = "sha256:bba440453a224e7d4478457fa2e8d8c3633765bafa02975a6b53b9bf917980bc"}, -] - -[package.dependencies] -editables = ">=0.3" -packaging = ">=21.3" -pathspec = ">=0.10.1" -pluggy = ">=1.0.0" -tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} -trove-classifiers = "*" - -[[package]] -name 
= "httpcore" -version = "0.17.3" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.7" -files = [ - {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, - {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, -] - -[package.dependencies] -anyio = ">=3.0,<5.0" -certifi = "*" -h11 = ">=0.13,<0.15" -sniffio = "==1.*" - -[package.extras] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "httpx" -version = "0.24.1" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.7" -files = [ - {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, - {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, -] - -[package.dependencies] -certifi = "*" -httpcore = ">=0.15.0,<0.18.0" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "hyperlink" -version = "21.0.0" -description = "A featureful, immutable, and correct URL for Python." 
-optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, - {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, -] - -[package.dependencies] -idna = ">=2.5" - -[[package]] -name = "identify" -version = "2.5.36" -description = "File identification library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, - {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] - -[[package]] -name = "importlib-metadata" -version = "7.1.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = 
"sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "iso8601" -version = "1.1.0" -description = "Simple module to parse ISO 8601 dates" -optional = true -python-versions = ">=3.6.2,<4.0" -files = [ - {file = "iso8601-1.1.0-py3-none-any.whl", hash = "sha256:8400e90141bf792bce2634df533dc57e3bee19ea120a87bebcd3da89a58ad73f"}, - {file = "iso8601-1.1.0.tar.gz", hash = "sha256:32811e7b81deee2063ea6d2e94f8819a86d1f3811e49d23623a41fa832bef03f"}, -] - -[[package]] -name = "jaraco-classes" -version = "3.4.0" -description = "Utility functions for Python class constructs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, - {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, -] - -[package.dependencies] -more-itertools = "*" - 
-[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "jaraco-context" -version = "5.3.0" -description = "Useful decorators and context managers" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jaraco.context-5.3.0-py3-none-any.whl", hash = "sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266"}, - {file = "jaraco.context-5.3.0.tar.gz", hash = "sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2"}, -] - -[package.dependencies] -"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["portend", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "jaraco-functools" -version = "4.0.1" -description = "Functools like those found in stdlib" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jaraco.functools-4.0.1-py3-none-any.whl", hash = "sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664"}, - {file = "jaraco_functools-4.0.1.tar.gz", hash = "sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8"}, -] - -[package.dependencies] -more-itertools = "*" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.classes", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "jeepney" -version = "0.8.0" -description = 
"Low-level, pure Python DBus protocol wrapper." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, - {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, -] - -[package.extras] -test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] -trio = ["async_generator", "trio"] - -[[package]] -name = "jinja2" -version = "3.1.3" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "keyring" -version = "25.1.0" -description = "Store and access your passwords safely." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "keyring-25.1.0-py3-none-any.whl", hash = "sha256:26fc12e6a329d61d24aa47b22a7c5c3f35753df7d8f2860973cf94f4e1fb3427"}, - {file = "keyring-25.1.0.tar.gz", hash = "sha256:7230ea690525133f6ad536a9b5def74a4bd52642abe594761028fc044d7c7893"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} -"jaraco.classes" = "*" -"jaraco.context" = "*" -"jaraco.functools" = "*" -jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} -pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} -SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} - -[package.extras] -completion = ["shtab (>=1.1.0)"] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - 
-[[package]] -name = "more-itertools" -version = "10.2.0" -description = "More routines for operating on iterables, beyond itertools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "more-itertools-10.2.0.tar.gz", hash = "sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1"}, - {file = "more_itertools-10.2.0-py3-none-any.whl", hash = "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684"}, -] - -[[package]] -name = "mypy" -version = "1.9.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = 
"mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nodeenv" -version = "1.8.0" -description = "Node.js virtual environment builder" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" -files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, -] - -[package.dependencies] -setuptools = "*" - -[[package]] -name = "packaging" -version = "24.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = 
"sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." -optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "platformdirs" -version = "4.2.1" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, - {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pre-commit" -version = "3.7.0" -description = "A framework for managing and maintaining multi-language pre-commit hooks." 
-optional = false -python-versions = ">=3.9" -files = [ - {file = "pre_commit-3.7.0-py2.py3-none-any.whl", hash = "sha256:5eae9e10c2b5ac51577c3452ec0a490455c45a0533f7960f993a0d01e59decab"}, - {file = "pre_commit-3.7.0.tar.gz", hash = "sha256:e209d61b8acdcf742404408531f0c37d49d2c734fd7cff2d6076083d191cb060"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydantic" -version = "2.7.1" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"}, - {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.18.2" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.18.2" -description = "Core functionality for Pydantic validation and serialization" -optional = false 
-python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"}, - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"}, - {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"}, - {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"}, - {file = 
"pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"}, - {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"}, - {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"}, - {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = 
"sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"}, - {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"}, - {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = 
"sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"}, - {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"}, - {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"}, - 
{file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"}, - {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"}, - {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = 
"sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"}, - {file = 
"pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"}, - {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pygments" -version = "2.17.2" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] - -[package.extras] -plugins = ["importlib-metadata"] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pypika-tortoise" -version = "0.1.6" -description = "Forked from pypika and streamline just for tortoise-orm" -optional = true -python-versions = ">=3.7,<4.0" -files = [ - {file = "pypika-tortoise-0.1.6.tar.gz", hash = "sha256:d802868f479a708e3263724c7b5719a26ad79399b2a70cea065f4a4cadbebf36"}, - {file = "pypika_tortoise-0.1.6-py3-none-any.whl", hash = "sha256:2d68bbb7e377673743cff42aa1059f3a80228d411fbcae591e4465e173109fd8"}, -] - -[[package]] -name = "pytest" -version = "7.4.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.21.1" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-asyncio-0.21.1.tar.gz", hash = 
"sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, - {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, -] - -[package.dependencies] -pytest = ">=7.0.0" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] - -[[package]] -name = "pytest-cov" -version = "4.1.0" -description = "Pytest plugin for measuring coverage." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = true -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = 
"sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywin32-ctypes" -version = "0.2.2" -description = "A (partial) reimplementation of pywin32 using ctypes/cffi" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, - {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rich" -version = "13.7.1" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "ruff" -version = "0.1.15" -description = "An extremely fast Python linter and code formatter, written in Rust." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, - {file = 
"ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, - {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, - {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, - {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, - {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, -] - -[[package]] -name = "secretstorage" -version = "3.3.3" -description = "Python bindings to FreeDesktop.org Secret Service API" -optional = false -python-versions = ">=3.6" -files = [ - {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, - {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, -] - -[package.dependencies] -cryptography = ">=2.0" -jeepney = ">=0.6" - -[[package]] -name = "setuptools" -version = "69.5.1" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", 
"ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "shellingham" -version = "1.5.4" -description = "Tool to Detect Surrounding Shell" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, - {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, -] - -[[package]] -name = "simplejson" -version = "3.19.2" -description = "Simple, fast, extensible JSON encoder/decoder for Python" -optional = false -python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "simplejson-3.19.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3471e95110dcaf901db16063b2e40fb394f8a9e99b3fe9ee3acc6f6ef72183a2"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3194cd0d2c959062b94094c0a9f8780ffd38417a5322450a0db0ca1a23e7fbd2"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:8a390e56a7963e3946ff2049ee1eb218380e87c8a0e7608f7f8790ba19390867"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1537b3dd62d8aae644f3518c407aa8469e3fd0f179cdf86c5992792713ed717a"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = 
"sha256:a8617625369d2d03766413bff9e64310feafc9fc4f0ad2b902136f1a5cd8c6b0"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:2c433a412e96afb9a3ce36fa96c8e61a757af53e9c9192c97392f72871e18e69"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f1c70249b15e4ce1a7d5340c97670a95f305ca79f376887759b43bb33288c973"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:287e39ba24e141b046812c880f4619d0ca9e617235d74abc27267194fc0c7835"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6f0a0b41dd05eefab547576bed0cf066595f3b20b083956b1405a6f17d1be6ad"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f98d918f7f3aaf4b91f2b08c0c92b1774aea113334f7cde4fe40e777114dbe6"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d74beca677623481810c7052926365d5f07393c72cbf62d6cce29991b676402"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f2398361508c560d0bf1773af19e9fe644e218f2a814a02210ac2c97ad70db0"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ad331349b0b9ca6da86064a3599c425c7a21cd41616e175ddba0866da32df48"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:332c848f02d71a649272b3f1feccacb7e4f7e6de4a2e6dc70a32645326f3d428"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25785d038281cd106c0d91a68b9930049b6464288cea59ba95b35ee37c2d23a5"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18955c1da6fc39d957adfa346f75226246b6569e096ac9e40f67d102278c3bcb"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:11cc3afd8160d44582543838b7e4f9aa5e97865322844b75d51bf4e0e413bb3e"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b01fda3e95d07a6148702a641e5e293b6da7863f8bc9b967f62db9461330562c"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:778331444917108fa8441f59af45886270d33ce8a23bfc4f9b192c0b2ecef1b3"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9eb117db8d7ed733a7317c4215c35993b815bf6aeab67523f1f11e108c040672"}, - {file = "simplejson-3.19.2-cp310-cp310-win32.whl", hash = "sha256:39b6d79f5cbfa3eb63a869639cfacf7c41d753c64f7801efc72692c1b2637ac7"}, - {file = "simplejson-3.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:5675e9d8eeef0aa06093c1ff898413ade042d73dc920a03e8cea2fb68f62445a"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ed628c1431100b0b65387419551e822987396bee3c088a15d68446d92f554e0c"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adcb3332979cbc941b8fff07181f06d2b608625edc0a4d8bc3ffc0be414ad0c4"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08889f2f597ae965284d7b52a5c3928653a9406d88c93e3161180f0abc2433ba"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7938a78447174e2616be223f496ddccdbf7854f7bf2ce716dbccd958cc7d13"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a970a2e6d5281d56cacf3dc82081c95c1f4da5a559e52469287457811db6a79b"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554313db34d63eac3b3f42986aa9efddd1a481169c12b7be1e7512edebff8eaf"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4d36081c0b1c12ea0ed62c202046dca11438bee48dd5240b7c8de8da62c620e9"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a3cd18e03b0ee54ea4319cdcce48357719ea487b53f92a469ba8ca8e39df285e"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66e5dc13bfb17cd6ee764fc96ccafd6e405daa846a42baab81f4c60e15650414"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:972a7833d4a1fcf7a711c939e315721a88b988553fc770a5b6a5a64bd6ebeba3"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3e74355cb47e0cd399ead3477e29e2f50e1540952c22fb3504dda0184fc9819f"}, - {file = "simplejson-3.19.2-cp311-cp311-win32.whl", hash = "sha256:1dd4f692304854352c3e396e9b5f0a9c9e666868dd0bdc784e2ac4c93092d87b"}, - {file = "simplejson-3.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:9300aee2a8b5992d0f4293d88deb59c218989833e3396c824b69ba330d04a589"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b8d940fd28eb34a7084877747a60873956893e377f15a32ad445fe66c972c3b8"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4969d974d9db826a2c07671273e6b27bc48e940738d768fa8f33b577f0978378"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c594642d6b13d225e10df5c16ee15b3398e21a35ecd6aee824f107a625690374"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f5a398b5e77bb01b23d92872255e1bcb3c0c719a3be40b8df146570fe7781a"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176a1b524a3bd3314ed47029a86d02d5a95cc0bee15bd3063a1e1ec62b947de6"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3c7363a8cb8c5238878ec96c5eb0fc5ca2cb11fc0c7d2379863d342c6ee367a"}, - {file = 
"simplejson-3.19.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:346820ae96aa90c7d52653539a57766f10f33dd4be609206c001432b59ddf89f"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de9a2792612ec6def556d1dc621fd6b2073aff015d64fba9f3e53349ad292734"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1c768e7584c45094dca4b334af361e43b0aaa4844c04945ac7d43379eeda9bc2"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:9652e59c022e62a5b58a6f9948b104e5bb96d3b06940c6482588176f40f4914b"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9c1a4393242e321e344213a90a1e3bf35d2f624aa8b8f6174d43e3c6b0e8f6eb"}, - {file = "simplejson-3.19.2-cp312-cp312-win32.whl", hash = "sha256:7cb98be113911cb0ad09e5523d0e2a926c09a465c9abb0784c9269efe4f95917"}, - {file = "simplejson-3.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:6779105d2fcb7fcf794a6a2a233787f6bbd4731227333a072d8513b252ed374f"}, - {file = "simplejson-3.19.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:061e81ea2d62671fa9dea2c2bfbc1eec2617ae7651e366c7b4a2baf0a8c72cae"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4280e460e51f86ad76dc456acdbfa9513bdf329556ffc8c49e0200878ca57816"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11c39fbc4280d7420684494373b7c5904fa72a2b48ef543a56c2d412999c9e5d"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bccb3e88ec26ffa90f72229f983d3a5d1155e41a1171190fa723d4135523585b"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb5b50dc6dd671eb46a605a3e2eb98deb4a9af787a08fcdddabe5d824bb9664"}, - {file 
= "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d94245caa3c61f760c4ce4953cfa76e7739b6f2cbfc94cc46fff6c050c2390c5"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d0e5ffc763678d48ecc8da836f2ae2dd1b6eb2d27a48671066f91694e575173c"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d222a9ed082cd9f38b58923775152003765016342a12f08f8c123bf893461f28"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8434dcdd347459f9fd9c526117c01fe7ca7b016b6008dddc3c13471098f4f0dc"}, - {file = "simplejson-3.19.2-cp36-cp36m-win32.whl", hash = "sha256:c9ac1c2678abf9270e7228133e5b77c6c3c930ad33a3c1dfbdd76ff2c33b7b50"}, - {file = "simplejson-3.19.2-cp36-cp36m-win_amd64.whl", hash = "sha256:92c4a4a2b1f4846cd4364855cbac83efc48ff5a7d7c06ba014c792dd96483f6f"}, - {file = "simplejson-3.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0d551dc931638e2102b8549836a1632e6e7cf620af3d093a7456aa642bff601d"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73a8a4653f2e809049999d63530180d7b5a344b23a793502413ad1ecea9a0290"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40847f617287a38623507d08cbcb75d51cf9d4f9551dd6321df40215128325a3"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be893258d5b68dd3a8cba8deb35dc6411db844a9d35268a8d3793b9d9a256f80"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9eb3cff1b7d71aa50c89a0536f469cb8d6dcdd585d8f14fb8500d822f3bdee4"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d0f402e787e6e7ee7876c8b05e2fe6464820d9f35ba3f172e95b5f8b699f6c7f"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:fbbcc6b0639aa09b9649f36f1bcb347b19403fe44109948392fbb5ea69e48c3e"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2fc697be37585eded0c8581c4788fcfac0e3f84ca635b73a5bf360e28c8ea1a2"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b0a3eb6dd39cce23801a50c01a0976971498da49bc8a0590ce311492b82c44b"}, - {file = "simplejson-3.19.2-cp37-cp37m-win32.whl", hash = "sha256:49f9da0d6cd17b600a178439d7d2d57c5ef01f816b1e0e875e8e8b3b42db2693"}, - {file = "simplejson-3.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c87c22bd6a987aca976e3d3e23806d17f65426191db36d40da4ae16a6a494cbc"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e4c166f743bb42c5fcc60760fb1c3623e8fda94f6619534217b083e08644b46"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0a48679310e1dd5c9f03481799311a65d343748fe86850b7fb41df4e2c00c087"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0521e0f07cb56415fdb3aae0bbd8701eb31a9dfef47bb57206075a0584ab2a2"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2d5119b1d7a1ed286b8af37357116072fc96700bce3bec5bb81b2e7057ab41"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c1467d939932901a97ba4f979e8f2642415fcf02ea12f53a4e3206c9c03bc17"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49aaf4546f6023c44d7e7136be84a03a4237f0b2b5fb2b17c3e3770a758fc1a0"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60848ab779195b72382841fc3fa4f71698a98d9589b0a081a9399904487b5832"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:0436a70d8eb42bea4fe1a1c32d371d9bb3b62c637969cb33970ad624d5a3336a"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:49e0e3faf3070abdf71a5c80a97c1afc059b4f45a5aa62de0c2ca0444b51669b"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ff836cd4041e16003549449cc0a5e372f6b6f871eb89007ab0ee18fb2800fded"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3848427b65e31bea2c11f521b6fc7a3145d6e501a1038529da2391aff5970f2f"}, - {file = "simplejson-3.19.2-cp38-cp38-win32.whl", hash = "sha256:3f39bb1f6e620f3e158c8b2eaf1b3e3e54408baca96a02fe891794705e788637"}, - {file = "simplejson-3.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:0405984f3ec1d3f8777c4adc33eac7ab7a3e629f3b1c05fdded63acc7cf01137"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:445a96543948c011a3a47c8e0f9d61e9785df2544ea5be5ab3bc2be4bd8a2565"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a8c3cc4f9dfc33220246760358c8265dad6e1104f25f0077bbca692d616d358"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af9c7e6669c4d0ad7362f79cb2ab6784d71147503e62b57e3d95c4a0f222c01c"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:064300a4ea17d1cd9ea1706aa0590dcb3be81112aac30233823ee494f02cb78a"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9453419ea2ab9b21d925d0fd7e3a132a178a191881fab4169b6f96e118cc25bb"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e038c615b3906df4c3be8db16b3e24821d26c55177638ea47b3f8f73615111c"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16ca9c90da4b1f50f089e14485db8c20cbfff2d55424062791a7392b5a9b3ff9"}, - 
{file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1018bd0d70ce85f165185d2227c71e3b1e446186f9fa9f971b69eee223e1e3cd"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e8dd53a8706b15bc0e34f00e6150fbefb35d2fd9235d095b4f83b3c5ed4fa11d"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d022b14d7758bfb98405672953fe5c202ea8a9ccf9f6713c5bd0718eba286fd"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:febffa5b1eda6622d44b245b0685aff6fb555ce0ed734e2d7b1c3acd018a2cff"}, - {file = "simplejson-3.19.2-cp39-cp39-win32.whl", hash = "sha256:4edcd0bf70087b244ba77038db23cd98a1ace2f91b4a3ecef22036314d77ac23"}, - {file = "simplejson-3.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:aad7405c033d32c751d98d3a65801e2797ae77fac284a539f6c3a3e13005edc4"}, - {file = "simplejson-3.19.2-py3-none-any.whl", hash = "sha256:bcedf4cae0d47839fee7de344f96b5694ca53c786f28b5f773d4f0b265a159eb"}, - {file = "simplejson-3.19.2.tar.gz", hash = "sha256:9eb442a2442ce417801c912df68e1f6ccfcd41577ae7274953ab3ad24ef7d82c"}, -] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] 
-name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - -[[package]] -name = "sphinx" -version = "7.3.7" -description = "Python documentation generator" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"}, - {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"}, -] - -[package.dependencies] -alabaster = ">=0.7.14,<0.8.0" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.18.1,<0.22" -imagesize = ">=1.3" -importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.14" -requests = ">=2.25.0" -snowballstemmer = ">=2.0" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.9" -tomli = {version = ">=2", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "importlib_metadata", "mypy (==1.9.0)", "pytest (>=6.0)", "ruff (==0.3.7)", "sphinx-lint", "tomli", "types-docutils", "types-requests"] -test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=6.0)", "setuptools (>=67.0)"] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.8" -description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" 
-optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, - {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.6" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, - {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.0.5" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, - {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = 
"sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.7" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, - {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.10" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, - {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sqlalchemy" -version = "2.0.29" -description = "Database Abstraction Library" -optional = true -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, 
- {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = 
"sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, - {file = 
"SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, - {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, - {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"asyncio\""} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] 
-mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlalchemy-stubs" -version = "0.4" -description = "SQLAlchemy stubs and mypy plugin" -optional = false -python-versions = "*" -files = [ - {file = "sqlalchemy-stubs-0.4.tar.gz", hash = "sha256:c665d6dd4482ef642f01027fa06c3d5e91befabb219dc71fc2a09e7d7695f7ae"}, - {file = "sqlalchemy_stubs-0.4-py3-none-any.whl", hash = "sha256:5eec7aa110adf9b957b631799a72fef396b23ff99fe296df726645d01e312aa5"}, -] - -[package.dependencies] -mypy = ">=0.790" -typing-extensions = ">=3.7.4" - -[[package]] -name = "starlette" -version = "0.37.2" -description = "The little ASGI library that shines." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, -] - -[package.dependencies] -anyio = ">=3.4.0,<5" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - -[package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tomli-w" -version = "1.0.0" -description = "A lil' TOML writer" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli_w-1.0.0-py3-none-any.whl", hash = "sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463"}, - {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"}, -] - -[[package]] -name = "tomlkit" -version = "0.12.4" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, - {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, -] - -[[package]] -name = "tortoise-orm" -version = "0.20.0" -description = "Easy async ORM for python, built with relations in mind" -optional = true -python-versions = ">=3.8,<4.0" -files = [ - {file = "tortoise_orm-0.20.0-py3-none-any.whl", hash = 
"sha256:1891ad935de689ddf002c5c65c864176d28659ab6069e45f0e2cde32359bb8d9"}, - {file = "tortoise_orm-0.20.0.tar.gz", hash = "sha256:283af584d685dcc58d6cc1da35b9115bb1e41c89075eae2a19c493b39b9b41f7"}, -] - -[package.dependencies] -aiosqlite = ">=0.16.0,<0.18.0" -iso8601 = ">=1.0.2,<2.0.0" -pypika-tortoise = ">=0.1.6,<0.2.0" -pytz = "*" - -[package.extras] -accel = ["ciso8601", "orjson", "uvloop"] -aiomysql = ["aiomysql"] -asyncmy = ["asyncmy (>=0.2.8,<0.3.0)"] -asyncodbc = ["asyncodbc (>=0.1.1,<0.2.0)"] -asyncpg = ["asyncpg"] -psycopg = ["psycopg[binary,pool] (>=3.0.12,<4.0.0)"] - -[[package]] -name = "trove-classifiers" -version = "2024.4.10" -description = "Canonical source for classifiers on PyPI (pypi.org)." -optional = false -python-versions = "*" -files = [ - {file = "trove-classifiers-2024.4.10.tar.gz", hash = "sha256:49f40bb6a746b72a1cba4f8d55ee8252169cda0f70802e3fd24f04b7fb25a492"}, - {file = "trove_classifiers-2024.4.10-py3-none-any.whl", hash = "sha256:678bd6fcc5218d72e3304e27a608acc9b91e17bd00c3f3d8c968497c843ad98b"}, -] - -[[package]] -name = "typing-extensions" -version = "4.11.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, -] - -[[package]] -name = "urllib3" -version = "2.2.1" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "userpath" -version = "1.9.2" -description = "Cross-platform tool for adding locations to the user PATH" -optional = false -python-versions = ">=3.7" -files = [ - {file = "userpath-1.9.2-py3-none-any.whl", hash = "sha256:2cbf01a23d655a1ff8fc166dfb78da1b641d1ceabf0fe5f970767d380b14e89d"}, - {file = "userpath-1.9.2.tar.gz", hash = "sha256:6c52288dab069257cc831846d15d48133522455d4677ee69a9781f11dbefd815"}, -] - -[package.dependencies] -click = "*" - -[[package]] -name = "uvicorn" -version = "0.29.0" -description = "The lightning-fast ASGI server." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, - {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, -] - -[package.dependencies] -click = ">=7.0" -h11 = ">=0.8" -typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} - -[package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] - -[[package]] -name = "virtualenv" -version = "20.26.0" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.26.0-py3-none-any.whl", hash = "sha256:0846377ea76e818daaa3e00a4365c018bc3ac9760cbb3544de542885aad61fb3"}, - {file = "virtualenv-20.26.0.tar.gz", hash = "sha256:ec25a9671a5102c8d2657f62792a27b48f016664c6873f6beed3800008577210"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "zipp" -version = "3.18.1" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - 
{file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "zstandard" -version = "0.22.0" -description = "Zstandard bindings for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zstandard-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:275df437ab03f8c033b8a2c181e51716c32d831082d93ce48002a5227ec93019"}, - {file = "zstandard-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ac9957bc6d2403c4772c890916bf181b2653640da98f32e04b96e4d6fb3252a"}, - {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe3390c538f12437b859d815040763abc728955a52ca6ff9c5d4ac707c4ad98e"}, - {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1958100b8a1cc3f27fa21071a55cb2ed32e9e5df4c3c6e661c193437f171cba2"}, - {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e1856c8313bc688d5df069e106a4bc962eef3d13372020cc6e3ebf5e045202"}, - {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1a90ba9a4c9c884bb876a14be2b1d216609385efb180393df40e5172e7ecf356"}, - {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3db41c5e49ef73641d5111554e1d1d3af106410a6c1fb52cf68912ba7a343a0d"}, - {file = "zstandard-0.22.0-cp310-cp310-win32.whl", hash = "sha256:d8593f8464fb64d58e8cb0b905b272d40184eac9a18d83cf8c10749c3eafcd7e"}, - {file = "zstandard-0.22.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:f1a4b358947a65b94e2501ce3e078bbc929b039ede4679ddb0460829b12f7375"}, - {file = "zstandard-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:589402548251056878d2e7c8859286eb91bd841af117dbe4ab000e6450987e08"}, - {file = "zstandard-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a97079b955b00b732c6f280d5023e0eefe359045e8b83b08cf0333af9ec78f26"}, - {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:445b47bc32de69d990ad0f34da0e20f535914623d1e506e74d6bc5c9dc40bb09"}, - {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33591d59f4956c9812f8063eff2e2c0065bc02050837f152574069f5f9f17775"}, - {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:888196c9c8893a1e8ff5e89b8f894e7f4f0e64a5af4d8f3c410f0319128bb2f8"}, - {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:53866a9d8ab363271c9e80c7c2e9441814961d47f88c9bc3b248142c32141d94"}, - {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ac59d5d6910b220141c1737b79d4a5aa9e57466e7469a012ed42ce2d3995e88"}, - {file = "zstandard-0.22.0-cp311-cp311-win32.whl", hash = "sha256:2b11ea433db22e720758cba584c9d661077121fcf60ab43351950ded20283440"}, - {file = "zstandard-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:11f0d1aab9516a497137b41e3d3ed4bbf7b2ee2abc79e5c8b010ad286d7464bd"}, - {file = "zstandard-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6c25b8eb733d4e741246151d895dd0308137532737f337411160ff69ca24f93a"}, - {file = "zstandard-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9b2cde1cd1b2a10246dbc143ba49d942d14fb3d2b4bccf4618d475c65464912"}, - {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a88b7df61a292603e7cd662d92565d915796b094ffb3d206579aaebac6b85d5f"}, - {file = 
"zstandard-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466e6ad8caefb589ed281c076deb6f0cd330e8bc13c5035854ffb9c2014b118c"}, - {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1d67d0d53d2a138f9e29d8acdabe11310c185e36f0a848efa104d4e40b808e4"}, - {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:39b2853efc9403927f9065cc48c9980649462acbdf81cd4f0cb773af2fd734bc"}, - {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8a1b2effa96a5f019e72874969394edd393e2fbd6414a8208fea363a22803b45"}, - {file = "zstandard-0.22.0-cp312-cp312-win32.whl", hash = "sha256:88c5b4b47a8a138338a07fc94e2ba3b1535f69247670abfe422de4e0b344aae2"}, - {file = "zstandard-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:de20a212ef3d00d609d0b22eb7cc798d5a69035e81839f549b538eff4105d01c"}, - {file = "zstandard-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d75f693bb4e92c335e0645e8845e553cd09dc91616412d1d4650da835b5449df"}, - {file = "zstandard-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:36a47636c3de227cd765e25a21dc5dace00539b82ddd99ee36abae38178eff9e"}, - {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68953dc84b244b053c0d5f137a21ae8287ecf51b20872eccf8eaac0302d3e3b0"}, - {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2612e9bb4977381184bb2463150336d0f7e014d6bb5d4a370f9a372d21916f69"}, - {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23d2b3c2b8e7e5a6cb7922f7c27d73a9a615f0a5ab5d0e03dd533c477de23004"}, - {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d43501f5f31e22baf822720d82b5547f8a08f5386a883b32584a185675c8fbf"}, - {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:a493d470183ee620a3df1e6e55b3e4de8143c0ba1b16f3ded83208ea8ddfd91d"}, - {file = "zstandard-0.22.0-cp38-cp38-win32.whl", hash = "sha256:7034d381789f45576ec3f1fa0e15d741828146439228dc3f7c59856c5bcd3292"}, - {file = "zstandard-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:d8fff0f0c1d8bc5d866762ae95bd99d53282337af1be9dc0d88506b340e74b73"}, - {file = "zstandard-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fdd53b806786bd6112d97c1f1e7841e5e4daa06810ab4b284026a1a0e484c0b"}, - {file = "zstandard-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:73a1d6bd01961e9fd447162e137ed949c01bdb830dfca487c4a14e9742dccc93"}, - {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9501f36fac6b875c124243a379267d879262480bf85b1dbda61f5ad4d01b75a3"}, - {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f260e4c7294ef275744210a4010f116048e0c95857befb7462e033f09442fe"}, - {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959665072bd60f45c5b6b5d711f15bdefc9849dd5da9fb6c873e35f5d34d8cfb"}, - {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d22fdef58976457c65e2796e6730a3ea4a254f3ba83777ecfc8592ff8d77d303"}, - {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a7ccf5825fd71d4542c8ab28d4d482aace885f5ebe4b40faaa290eed8e095a4c"}, - {file = "zstandard-0.22.0-cp39-cp39-win32.whl", hash = "sha256:f058a77ef0ece4e210bb0450e68408d4223f728b109764676e1a13537d056bb0"}, - {file = "zstandard-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:e9e9d4e2e336c529d4c435baad846a181e39a982f823f7e4495ec0b0ec8538d2"}, - {file = "zstandard-0.22.0.tar.gz", hash = "sha256:8226a33c542bcb54cd6bd0a366067b610b41713b64c9abec1bc4533d69f51e70"}, -] - -[package.dependencies] -cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} - 
-[package.extras] -cffi = ["cffi (>=1.11)"] - -[extras] -databases = ["sqlalchemy", "tortoise-orm"] -sqla = ["sqlalchemy"] -tortoise = ["tortoise-orm"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9" -content-hash = "7b85a97137b5a7a8983babc8127252ede873a1993e986f43228c1ae210eb0ab8" diff --git a/pyproject.toml b/pyproject.toml index 5ef57294..2407729c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,53 +1,37 @@ -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - [project] name = "FastAPI-JSONAPI" dynamic = ["version"] -description = "FastAPI extension to create REST web api according to JSON:API 1.0 specification with FastAPI, Pydantic and data provider of your choice (SQLAlchemy, Tortoise ORM)" +description = "FastAPI extension to create REST web api according to JSON:API 1.0 specification with FastAPI, Pydantic and data layer of your choice (SQLAlchemy, Tortoise ORM)" readme = "README.md" license = "MIT" authors = [ { name = "Aleksey Nekrasov", email = "a.nekrasov@mts.ai" }, - { name = "Suren Khorenyan", email = "s.khorenyan@mts.ai" }, + { name = "Suren Khorenyan", email = "surenkhorenyan@gmail.com" }, { name = "German Bernadskiy", email = "german11235813@gmail.com" }, ] keywords = [ "fastapi", "jsonapi", "json:api", + "sqlalchemy", ] +# TODO: update classifiers = [ "Development Status :: 5 - Production/Stable", "Operating System :: OS Independent", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Topic :: Utilities", ] dependencies = [ - "fastapi>=0.79.0", - "pydantic>=1.9.1", - "simplejson>=3.17.6", - "uvicorn>=0.18.2", + "fastapi>=0.100.0", + "pydantic>=2", ] [project.optional-dependencies] -all = [ - "pytest", - "sphinx", - "SQLAlchemy[asyncio]>=1.4.39", - "tortoise-orm>=0.19.2", -] -docs = [ - "sphinx", -] sqlalchemy = [ - 
"SQLAlchemy[asyncio]>=1.4.39", -] -tests = [ - "pytest", + "SQLAlchemy[asyncio]>=2", ] tortoise-orm = [ "tortoise-orm>=0.19.2", @@ -57,6 +41,14 @@ tortoise-orm = [ Documentation = "https://fastapi-jsonapi.readthedocs.io/" Source = "https://github.com/mts-ai/FastAPI-JSONAPI" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + + +[tool.hatch] + [tool.hatch.version] path = "fastapi_jsonapi/VERSION" pattern = "(?P[^']+)" @@ -72,36 +64,82 @@ packages = [ "fastapi_jsonapi" ] -[tool.poetry] -package-mode = false -[tool.poetry.dependencies] -python = "^3.9" -fastapi = ">=0.79.0" -pydantic = ">=1.9.1" -simplejson = ">=3.17.6" -uvicorn = ">=0.18.2" +[tool.hatch.envs.dev] +dependencies = [ + "httpx", + "sphinx", + "aiosqlite", + "asyncpg", + "faker", + "httpx==0.24.1", + "pytest-cov", + "pytest-asyncio==0.25.3", + "pre-commit", +] + + +[tool.hatch.envs.hatch-test] +extra-dependencies = [ + # project dependencies + "sqlalchemy[asyncio]", + "aiosqlite", + "asyncpg", + + # app test dependencies + "faker", + "httpx==0.24.1", + + # test dependencies + "pytest-cov", + "pytest-asyncio==0.25.3", + "pytest==8.3.4", + "coverage[toml]~=7.6", +] + -sqlalchemy = { version = ">=1.4.39", optional = true, extras = ["asyncio"] } -tortoise-orm = { version = ">=0.19.2", optional = true } +[[tool.hatch.envs.hatch-test.matrix]] +python = [ + "3.11", + "3.12", + "3.13", +] -[tool.poetry.group.tests.dependencies] -pytest = "^7.3.1" -faker = "^18.9.0" -httpx = "^0.24.1" -pytest-asyncio = "^0.21.0" -coverage = "^7.2.6" -pytest-cov = "^4.1.0" -aiosqlite = "0.17.0" -asyncpg = "0.28.0" +[tool.hatch.envs.hatch-test.scripts] +run = "pytest{env:HATCH_TEST_ARGS:} {args}" +cov = "pytest --cov-config .coveragerc --cov=fastapi_jsonapi --cov=testing {args}" +cov-xml = "cov --cov-report=xml:coverage/python-{matrix:python}/coverage.xml" +cov-html = [ + "cov", + "coverage html -d coverage/python-{matrix:python}", +] + + +[tool.pytest.ini_options] +minversion = "8.0" +testpaths = [ + 
"tests", +] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" + +# ignore DeprecationWarning for: +# pytest-asyncio detected an unclosed event loop when tearing down the event_loop +# - could not fix +filterwarnings = [ + "ignore::DeprecationWarning", +] + + +[tool.poetry] +package-mode = false [tool.poetry.group.lint.dependencies] -black = "^23.3.0" -ruff = "^0.1.8" -mypy = "^1.4.1" -sqlalchemy-stubs = "^0.4" -pre-commit = "^3.3.3" +black = "^25.1.0" +ruff = "^0.4.8" +mypy = "^1.10.0" +pre-commit = "^3.7.1" [tool.poetry.group.docs.dependencies] sphinx = "^7.0.1" @@ -110,11 +148,6 @@ sphinx = "^7.0.1" [tool.poetry.group.dev.dependencies] hatch = "^1.7.0" -[tool.poetry.extras] -sqla = ["sqlalchemy"] -tortoise = ["tortoise-orm"] -databases = ["sqlalchemy", "tortoise-orm"] - [tool.black] line-length = 119 target-version = ["py38"] @@ -122,37 +155,8 @@ target-version = ["py38"] [tool.ruff] line-length = 119 target-version = "py39" -select = [ - "E", - "W", - "F", - "I", - "D", - "A", - "C4", - "COM", - "DTZ", - "T10", - "EM", - "ISC", - "ICN", - "G", - "T20", - "PT", - "SIM", - "TID", - "ARG", - "PTH", -# "ERA", - "PL", - "PLE", - "PLR", - "PLW", - "RUF", - "ASYNC", -# "UP", -# "ANN", -] +output-format = "full" + # Exclude a variety of commonly ignored directories. exclude = [ @@ -181,45 +185,211 @@ exclude = [ "examples/api_for_tortoise_orm/*", ] -# Avoid trying to fix flake8-bugbear (`B`) violations. 
-unfixable = ["B"] + +[tool.ruff.lint] +select = [ + # Pyflakes + "F", + # pycodestyle + "E", "W", + # mccabe + "C90", + # isort + "I", + # pep8-naming + "N", + # pydocstyle + "D", + # pyupgrade + "UP", + # flake8-2020 + "YTT", + # flake8-annotations + # "ANN", # temporarily disabled + # flake8-async + "ASYNC", + # flake8-trio + "TRIO", + # flake8-bandit + "S", + # flake8-blind-except + "BLE", + # flake8-boolean-trap + "FBT", + # flake8-bugbear + "B", + # flake8-builtins + "A", + # flake8-commas + "COM", + # flake8-copyright + # missing-copyright-notice (CPY001)# + # "CPY", + # flake8-comprehensions + "C4", + # flake8-datetimez + "DTZ", + # flake8-debugger + "T10", + # flake8-django + "DJ", + # flake8-errmsg + "EM", + # flake8-executable + "EXE", + # flake8-future-annotations + "FA", + # flake8-implicit-str-concat + "ISC", + # flake8-import-conventions + "ICN", + # flake8-logging + "LOG", + # flake8-logging-format + "G", + # flake8-no-pep420 + "INP", + # flake8-pie + "PIE", + # flake8-print + "T20", + # flake8-pyi + "PYI", + # flake8-pytest-style + "PT", + # flake8-quotes + "Q", + # flake8-raise + "RSE", + # flake8-return + "RET", + # flake8-self + "SLF", + # flake8-slots + "SLOT", + # flake8-simplify + "SIM", + # flake8-tidy-imports + "TID", + # flake8-type-checking + "TCH", + # flake8-gettext + "INT", + # flake8-unused-arguments + "ARG", + # flake8-use-pathlib + "PTH", + # flake8-todos + "TD", + # flake8-fixme + "FIX", + # eradicate + "ERA", + # pandas-vet + "PD", + # pygrep-hooks + "PGH", + # Pylint + "PL", + # pylint more + "PLC", + "PLE", + "PLR", + "PLW", + # tryceratops + "TRY", + # flynt + "FLY", + # NumPy-specific rules + "NPY", + # Airflow + "AIR", + # Perflint + "PERF", + # refurb + "FURB", + # Ruff-specific rules + "RUF", +] extend-ignore = [ - "D401", - "D403", - "D400", - "D415", - "D100", # Missing docstring in public module - "D101", # Missing docstring in public class - "D102", # Missing docstring in public method - "D103", # Missing docstring in 
public function - "D104", # Missing docstring in public package - "D105", # Missing docstring in magic method - "D106", # Missing docstring in public nested class - "D107", # Missing docstring in `__init__` - "D200", # One-line docstring should fit on one line - "D203", # 1 blank line required before class docstring - "D210", # No whitespaces allowed surrounding docstring text - "D212", # Multi-line docstring summary should start at the first line - "D301", # Use r""" if any backslashes in a docstring - "D404", # First word of the docstring should not be "This" - "PLR0913", # Too many arguments to function call - "A003", # Class attribute `type` is shadowing a Python builtin - "ARG001", # Unused function argument: `{name}` - "ARG002", # Unused method argument: `{name}` - "ARG003", # Unused class method argument: `{name}` - "RUF001", # String contains ambiguous unicode character {confusable} (did you mean {representant}?) - "RUF002", # Docstring contains ambiguous unicode character {confusable} (did you mean {representant}?) - "RUF003", # Comment contains ambiguous unicode character {confusable} (did you mean {representant}?) 
- "PT006", # pytest parametrize tuple args -] - -[tool.ruff.per-file-ignores] + "D400", # First line should end with a period + "D401", # First line of docstring should be in imperative mood + # "D403", + "D404", # First word of the docstring should not be "This" + "D415", # First line should end with a period, question mark, or exclamation point + "D100", # Missing docstring in public module + "D101", # Missing docstring in public class + "D102", # Missing docstring in public method + "D103", # Missing docstring in public function + "D104", # Missing docstring in public package + "D105", # Missing docstring in magic method + "D106", # Missing docstring in public nested class + "D107", # Missing docstring in `__init__` + "D200", # One-line docstring should fit on one line + "D203", # 1 blank line required before class docstring + "D205", # 1 blank line required between summary line and description", + "D210", # No whitespaces allowed surrounding docstring text + "D212", # Multi-line docstring summary should start at the first line + # "D301", # Use r""" if any backslashes in a docstring + "PLR0913", # Too many arguments to function call + # "A003", # Class attribute `type` is shadowing a Python builtin + "ARG001", # Unused function argument: `{name}` + "ARG002", # Unused method argument: `{name}` + "ARG003", # Unused class method argument: `{name}` + "RUF001", # String contains ambiguous unicode character {confusable} (did you mean {representant}?) + "RUF002", # Docstring contains ambiguous unicode character {confusable} (did you mean {representant}?) + "RUF003", # Comment contains ambiguous unicode character {confusable} (did you mean {representant}?) + "PT006", # pytest parametrize tuple args + "B008", # Do not perform function call `Depends` in argument defaults + "B904", # Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... 
from None` to distinguish them from errors in exception handling + "FIX002", # Line contains TODO, consider resolving the issue + "FIX003", # Line contains XXX, consider resolving the issue + "FIX004", # Line contains HACK, consider resolving the issue + "TD001", # Invalid TODO tag: `XXX` + "TD002", # Missing author in TODO; try: `# TODO(): ...` or `# TODO @ + "TD003", # Missing issue link on the line following this TODO + "TD005", # Missing issue description after `TODO` + "ERA001", # TODO: remove + "S101", # TODO: remove +] + + +[tool.ruff.lint.per-file-ignores] "examples/api_for_sqlalchemy/*" = [ "E402", "D105", ] +"tests/*" = [ + "S101", + "ANN001", + "S311", # Standard pseudo-random generators are not suitable for cryptographic purposes + "PLR0913", # Too many arguments in function definition + "FBT001", # FBT001 Boolean-typed positional argument in function definition + "C901", # func is too complex (11 > 10) + "N806", #Variable `{name}` in function should be lowercase +] +"tests/**/conftest.py" = [ + "PGH004", # Use specific rule codes when using `noqa` +] +"tests/test_utils/test_dependency_helper.py" = [ + "SLF001", +] + +"fastapi_jsonapi/data_layers/**.py" = [ + "FBT001", +] -[tool.ruff.mccabe] +[tool.ruff.lint.mccabe] # Unlike Flake8, default to a complexity level of 10. max-complexity = 10 + +[tool.ruff.lint.pep8-naming] +classmethod-decorators = [ + # Allow Pydantic's `@validator` decorator to trigger class method treatment. + "pydantic.validator", + # Allow SQLAlchemy's dynamic decorators, like `@field.expression`, to trigger class method treatment. 
+ "declared_attr", + "expression", + "comparator", +] diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 2f4c80e3..00000000 --- a/pytest.ini +++ /dev/null @@ -1,2 +0,0 @@ -[pytest] -asyncio_mode = auto diff --git a/tests/common.py b/tests/common.py index ae259072..c6a352b4 100644 --- a/tests/common.py +++ b/tests/common.py @@ -5,11 +5,14 @@ from sqlalchemy.engine import Engine +def sqla_db_filepath(): + return Path(__file__).resolve().parent / "db.sqlite3" + + def sqla_uri(): testing_db_url = getenv("TESTING_DB_URL") if not testing_db_url: - db_dir = Path(__file__).resolve().parent - testing_db_url = f"sqlite+aiosqlite:///{db_dir}/db.sqlite3" + testing_db_url = f"sqlite+aiosqlite:///{sqla_db_filepath()}" return testing_db_url diff --git a/tests/common_user_api_test.py b/tests/common_user_api_test.py index 05c61c29..3b2fa18a 100644 --- a/tests/common_user_api_test.py +++ b/tests/common_user_api_test.py @@ -7,6 +7,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from starlette import status +from fastapi_jsonapi.views.view_base import ViewBase from tests.misc.utils import fake from tests.models import User from tests.schemas import UserAttributesBaseSchema @@ -44,11 +45,10 @@ def prepare_user_create_data( ): data_user_attributes = user_attributes.model_dump() data_user_attributes[self.FIELD_CUSTOM_NAME] = self.validator_create.expected_value - data_user_create = { + return { "type": resource_type, "attributes": data_user_attributes, } - return data_user_create def prepare_user_update_data( self, @@ -61,24 +61,28 @@ def prepare_user_update_data( data_user_attributes = user_attributes.model_dump() data_user_attributes[self.FIELD_CUSTOM_NAME] = self.validator_update.expected_value - data_user_update = { - "id": user.id, + return { + "id": ViewBase.get_db_item_id(user), "type": resource_type, "attributes": data_user_attributes, } - return data_user_update - def validate_field_not_passed_response(self, response, 
expected_status=status.HTTP_422_UNPROCESSABLE_ENTITY): + def validate_field_not_passed_response( + self, + response, + input_data: dict, + expected_status=status.HTTP_422_UNPROCESSABLE_ENTITY, + ): assert response.status_code == expected_status, response.text response_data = response.json() - assert response_data == { - "detail": [ - { - "loc": ["body", "data", "attributes", self.FIELD_CUSTOM_NAME], - "msg": "field required", - "type": "value_error.missing", - }, - ], + assert len(response_data["detail"]) == 1 + detail = response_data["detail"][0] + detail.pop("url", None) + assert detail == { + "input": input_data, + "loc": ["body", "data", "attributes", self.FIELD_CUSTOM_NAME], + "msg": "Field required", + "type": "missing", } def validate_field_value_invalid_response(self, response, validator: ValidateCustomNameEqualsBase): @@ -108,7 +112,10 @@ async def validate_user_creation_on_error_key_not_passed( } url = app.url_path_for(f"create_{resource_type}_list") response = await client.post(url, json=data_user_create) - self.validate_field_not_passed_response(response) + self.validate_field_not_passed_response( + response, + input_data=attributes_data, + ) async def validate_user_creation_test_error_value_passed_but_invalid( self, @@ -142,14 +149,17 @@ async def validate_user_update_error_key_not_passed( assert self.FIELD_CUSTOM_NAME not in attributes_data data_user_update = { "data": { - "id": user.id, + "id": ViewBase.get_db_item_id(user), "type": resource_type, "attributes": attributes_data, }, } url = app.url_path_for(f"update_{resource_type}_detail", obj_id=user.id) response = await client.patch(url, json=data_user_update) - self.validate_field_not_passed_response(response) + self.validate_field_not_passed_response( + response, + input_data=attributes_data, + ) async def validate_user_update_error_value_passed_but_invalid( self, @@ -164,7 +174,7 @@ async def validate_user_update_error_value_passed_but_invalid( assert attributes_data[self.FIELD_CUSTOM_NAME] != 
self.validator_update.expected_value data_user_update = { "data": { - "id": user.id, + "id": ViewBase.get_db_item_id(user), "type": resource_type, "attributes": attributes_data, }, diff --git a/tests/conftest.py b/tests/conftest.py index 45ecccfa..330519fe 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,8 +4,7 @@ import pytest from fastapi import FastAPI from httpx import AsyncClient -from pytest import fixture # noqa PT013 -from pytest_asyncio import fixture as async_fixture +from pytest_asyncio import is_async_test from tests.fixtures.app import ( # noqa app, @@ -14,7 +13,7 @@ from tests.fixtures.db_connection import ( # noqa async_engine, async_session, - async_session_plain, + refresh_db, ) from tests.fixtures.entities import ( # noqa child_1, @@ -54,7 +53,6 @@ DetailViewBaseGeneric, ListViewBaseGeneric, ) -from tests.models import Base def configure_logging(): @@ -66,28 +64,18 @@ def configure_logging(): configure_logging() +# TODO: +# https://pytest-asyncio.readthedocs.io/en/stable/how-to-guides/run_session_tests_in_same_loop.html -@pytest.fixture(scope="session") -def event_loop(): - """ - Create an instance of the default event loop for each test case. 
- Why: - https://stackoverflow.com/questions/66054356/multiple-async-unit-tests-fail-but-running-them-one-by-one-will-pass - """ - loop = asyncio.get_event_loop_policy().new_event_loop() - yield loop - loop.close() +def pytest_collection_modifyitems(items): + pytest_asyncio_tests = (item for item in items if is_async_test(item)) + session_scope_marker = pytest.mark.asyncio(loop_scope="session") + for async_test in pytest_asyncio_tests: + async_test.add_marker(session_scope_marker, append=False) -@async_fixture() -async def client(app: FastAPI) -> AsyncClient: # noqa +@pytest.fixture() +async def client(app: FastAPI) -> AsyncClient: # noqa: F811 async with AsyncClient(app=app, base_url="http://test") as ac: yield ac - - -@async_fixture(autouse=True) -async def refresh_db(async_engine): # noqa F811 - async with async_engine.begin() as connector: - for table in reversed(Base.metadata.sorted_tables): - await connector.execute(table.delete()) diff --git a/tests/fixtures/app.py b/tests/fixtures/app.py index 92567d65..91a9921d 100644 --- a/tests/fixtures/app.py +++ b/tests/fixtures/app.py @@ -1,5 +1,6 @@ +from __future__ import annotations + from pathlib import Path -from typing import Optional, Type import pytest from fastapi import APIRouter, FastAPI @@ -230,8 +231,8 @@ def build_app_custom( schema_in_post=None, path: str = "/misc", resource_type: str = "misc", - class_list: Type[ListViewBase] = ListViewBaseGeneric, - class_detail: Type[DetailViewBase] = DetailViewBaseGeneric, + class_list: type[ListViewBase] = ListViewBaseGeneric, + class_detail: type[DetailViewBase] = DetailViewBaseGeneric, ) -> FastAPI: router: APIRouter = APIRouter() @@ -291,12 +292,12 @@ def build_alphabet_app() -> FastAPI: class ResourceInfoDTO(BaseModel): path: str resource_type: str - model: Type[TypeModel] - schema_: Type[BaseModel] - schema_in_patch: Optional[BaseModel] = None - schema_in_post: Optional[BaseModel] = None - class_list: Type[ListViewBase] = ListViewBaseGeneric - class_detail: 
Type[DetailViewBase] = DetailViewBaseGeneric + model: type[TypeModel] + schema_: type[BaseModel] + schema_in_patch: BaseModel | None = None + schema_in_post: BaseModel | None = None + class_list: type[ListViewBase] = ListViewBaseGeneric + class_detail: type[DetailViewBase] = DetailViewBaseGeneric model_config = ConfigDict(arbitrary_types_allowed=True) diff --git a/tests/fixtures/db_connection.py b/tests/fixtures/db_connection.py index 70eb353e..0689e1bb 100644 --- a/tests/fixtures/db_connection.py +++ b/tests/fixtures/db_connection.py @@ -1,17 +1,32 @@ -from pytest import fixture # noqa PT013 -from pytest_asyncio import fixture as async_fixture +import pytest +from sqlalchemy import AsyncAdaptedQueuePool from sqlalchemy.engine import make_url -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker +from sqlalchemy.ext.asyncio import ( + async_sessionmaker, + create_async_engine, +) -from tests.common import sqla_uri +from tests.common import ( + sqla_uri, +) from tests.models import Base -def get_async_sessionmaker() -> sessionmaker: - engine = create_async_engine(url=make_url(sqla_uri())) - _async_session = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) - return _async_session +def create_engine(): + return create_async_engine( + url=make_url(sqla_uri()), + echo=False, + pool_size=10, + poolclass=AsyncAdaptedQueuePool, + ) + + +def get_async_session_maker() -> async_sessionmaker: + engine = create_engine() + return async_sessionmaker( + bind=engine, + expire_on_commit=False, + ) async def async_session_dependency(): @@ -20,37 +35,38 @@ async def async_session_dependency(): :return: """ - session_maker = get_async_sessionmaker() - async with session_maker() as db_session: # type: AsyncSession - yield db_session - await db_session.rollback() + session_factory = get_async_session_maker() + async with session_factory() as session: # type: AsyncSession + try: + yield session + finally: + 
await session.rollback() -@async_fixture(scope="class") +@pytest.fixture() async def async_engine(): - engine = create_async_engine( - url=make_url(sqla_uri()), - echo=False, - # echo=True, - ) - async with engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) - await conn.run_sync(Base.metadata.create_all) - return engine + return create_engine() -@async_fixture(scope="class") -async def async_session_plain(async_engine): - session = sessionmaker( +@pytest.fixture() +async def async_session(async_engine): + session_factory = async_sessionmaker( bind=async_engine, - class_=AsyncSession, expire_on_commit=False, ) - return session + async with session_factory() as session: # type: AsyncSession + try: + yield session + finally: + await session.rollback() + + +async def recreate_tables(engine): + async with engine.begin() as connector: + await connector.run_sync(Base.metadata.drop_all) + await connector.run_sync(Base.metadata.create_all) -@async_fixture(scope="class") -async def async_session(async_session_plain): - async with async_session_plain() as session: # type: AsyncSession - yield session - await session.rollback() +@pytest.fixture() +async def refresh_db(async_engine): + await recreate_tables(async_engine) diff --git a/tests/fixtures/entities.py b/tests/fixtures/entities.py index 133e4ac9..2762ca1d 100644 --- a/tests/fixtures/entities.py +++ b/tests/fixtures/entities.py @@ -1,10 +1,8 @@ from __future__ import annotations -from typing import Awaitable, Callable, List +from typing import TYPE_CHECKING, Callable -from pytest import fixture # noqa -from pytest_asyncio import fixture as async_fixture -from sqlalchemy.ext.asyncio import AsyncSession +import pytest from tests.misc.utils import fake from tests.models import ( @@ -19,6 +17,11 @@ Workplace, ) +if TYPE_CHECKING: + from collections.abc import Awaitable + + from sqlalchemy.ext.asyncio import AsyncSession + def build_user(**fields) -> User: fake_fields = { @@ -37,7 +40,7 @@ async def 
create_user(async_session: AsyncSession, **fields): return user -@async_fixture() +@pytest.fixture() async def user_1(async_session: AsyncSession): user = build_user() async_session.add(user) @@ -48,7 +51,7 @@ async def user_1(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def user_2(async_session: AsyncSession): user = build_user() async_session.add(user) @@ -59,7 +62,7 @@ async def user_2(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def user_3(async_session: AsyncSession): user = build_user() async_session.add(user) @@ -77,7 +80,7 @@ async def build_user_bio(async_session: AsyncSession, user: User, **fields): return bio -@async_fixture() +@pytest.fixture() async def user_1_bio(async_session: AsyncSession, user_1: User) -> UserBio: return await build_user_bio( async_session, @@ -88,7 +91,7 @@ async def user_1_bio(async_session: AsyncSession, user_1: User) -> UserBio: ) -@async_fixture() +@pytest.fixture() async def user_2_bio(async_session: AsyncSession, user_2: User) -> UserBio: return await build_user_bio( async_session, @@ -107,8 +110,8 @@ async def build_post(async_session: AsyncSession, user: User, **fields) -> Post: return post -@async_fixture() -async def user_1_posts(async_session: AsyncSession, user_1: User) -> List[Post]: +@pytest.fixture() +async def user_1_posts(async_session: AsyncSession, user_1: User) -> list[Post]: posts = [ Post( title=f"post_u1_{i}", @@ -126,7 +129,7 @@ async def user_1_posts(async_session: AsyncSession, user_1: User) -> List[Post]: return posts -@async_fixture() +@pytest.fixture() async def user_1_post(async_session: AsyncSession, user_1: User): post = Post(title="post_for_u1", user=user_1) async_session.add(post) @@ -140,8 +143,8 @@ async def user_1_post(async_session: AsyncSession, user_1: User): await async_session.commit() -@async_fixture() -async def user_2_posts(async_session: AsyncSession, user_2: User) -> 
List[Post]: +@pytest.fixture() +async def user_2_posts(async_session: AsyncSession, user_2: User) -> list[Post]: posts = [ Post( title=f"post_u2_{i}", @@ -159,7 +162,7 @@ async def user_2_posts(async_session: AsyncSession, user_2: User) -> List[Post]: return posts -@async_fixture() +@pytest.fixture() async def user_1_comments_for_u2_posts(async_session: AsyncSession, user_1, user_2_posts): post_comments = [ PostComment( @@ -182,12 +185,12 @@ async def user_1_comments_for_u2_posts(async_session: AsyncSession, user_1, user await async_session.commit() -@fixture() -def user_1_post_for_comments(user_1_posts: List[Post]) -> Post: +@pytest.fixture() +def user_1_post_for_comments(user_1_posts: list[Post]) -> Post: return user_1_posts[0] -@async_fixture() +@pytest.fixture() async def computer_1(async_session: AsyncSession): computer = Computer(name="Halo") @@ -201,7 +204,7 @@ async def computer_1(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def computer_2(async_session: AsyncSession): computer = Computer(name="Nestor") @@ -215,7 +218,7 @@ async def computer_2(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def computer_factory(async_session: AsyncSession) -> Callable[[str | None], Awaitable[Computer]]: async def factory(name: str | None = None) -> Computer: computer = Computer(name=name or fake.word()) @@ -244,7 +247,7 @@ async def build_post_comment( return post_comment -@async_fixture() +@pytest.fixture() async def user_2_comment_for_one_u1_post(async_session: AsyncSession, user_2, user_1_post_for_comments): post = user_1_post_for_comments post_comment = PostComment( @@ -263,7 +266,7 @@ async def user_2_comment_for_one_u1_post(async_session: AsyncSession, user_2, us await async_session.commit() -@async_fixture() +@pytest.fixture() async def parent_1(async_session: AsyncSession): parent = Parent( name="parent_1", @@ -279,7 +282,7 @@ async def 
parent_1(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def parent_2(async_session: AsyncSession): parent = Parent( name="parent_2", @@ -295,7 +298,7 @@ async def parent_2(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def parent_3(async_session: AsyncSession): parent = Parent( name="parent_3", @@ -311,7 +314,7 @@ async def parent_3(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def child_1(async_session: AsyncSession): child = Child( name="child_1", @@ -327,7 +330,7 @@ async def child_1(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def child_2(async_session: AsyncSession): child = Child( name="child_2", @@ -343,7 +346,7 @@ async def child_2(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def child_3(async_session: AsyncSession): child = Child( name="child_3", @@ -359,7 +362,7 @@ async def child_3(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def child_4(async_session: AsyncSession): child = Child( name="child_4", @@ -375,7 +378,7 @@ async def child_4(async_session: AsyncSession): await async_session.commit() -@async_fixture() +@pytest.fixture() async def p1_c1_association( async_session: AsyncSession, parent_1: Parent, @@ -397,7 +400,7 @@ async def p1_c1_association( await async_session.commit() -@async_fixture() +@pytest.fixture() async def p2_c1_association( async_session: AsyncSession, parent_2: Parent, @@ -419,7 +422,7 @@ async def p2_c1_association( await async_session.commit() -@async_fixture() +@pytest.fixture() async def p1_c2_association( async_session: AsyncSession, parent_1: Parent, @@ -441,7 +444,7 @@ async def p1_c2_association( await async_session.commit() -@async_fixture() +@pytest.fixture() async def p2_c2_association( 
async_session: AsyncSession, parent_2: Parent, @@ -463,7 +466,7 @@ async def p2_c2_association( await async_session.commit() -@async_fixture() +@pytest.fixture() async def p2_c3_association( async_session: AsyncSession, parent_2: Parent, @@ -494,14 +497,14 @@ async def build_workplace(async_session: AsyncSession, **fields): return workplace -@async_fixture() +@pytest.fixture() async def workplace_1( async_session: AsyncSession, ): yield await build_workplace(async_session, name="workplace_1") -@async_fixture() +@pytest.fixture() async def workplace_2( async_session: AsyncSession, ): diff --git a/tests/fixtures/user.py b/tests/fixtures/user.py index 070d6c1d..0852e979 100644 --- a/tests/fixtures/user.py +++ b/tests/fixtures/user.py @@ -7,12 +7,11 @@ @pytest.fixture() def user_attributes_factory(): def factory(): - user_attributes = UserAttributesBaseSchema( + return UserAttributesBaseSchema( name=fake.name(), age=fake.pyint(min_value=13, max_value=99), email=fake.email(), ) - return user_attributes return factory diff --git a/tests/fixtures/views.py b/tests/fixtures/views.py index c3a583b3..35488dd1 100644 --- a/tests/fixtures/views.py +++ b/tests/fixtures/views.py @@ -1,8 +1,7 @@ -from typing import ClassVar, Dict +from typing import ClassVar from fastapi import Depends from pydantic import BaseModel, ConfigDict -from pytest import fixture # noqa from sqlalchemy.ext.asyncio import AsyncSession from fastapi_jsonapi.misc.sqla.generics.base import ( @@ -24,7 +23,7 @@ class SessionDependency(ArbitraryModelBase): session: AsyncSession = Depends(async_session_dependency) -def common_handler(view: ViewBase, dto: SessionDependency) -> Dict: +def common_handler(view: ViewBase, dto: SessionDependency) -> dict: # noqa: ARG001 return {"session": dto.session} diff --git a/tests/models.py b/tests/models.py index df555d8f..6dffef9c 100644 --- a/tests/models.py +++ b/tests/models.py @@ -1,8 +1,22 @@ -from typing import TYPE_CHECKING, List, Optional +from __future__ import 
annotations + +from datetime import datetime +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, +) from uuid import UUID from sqlalchemy import JSON, Column, DateTime, ForeignKey, Index, Integer, String, Text -from sqlalchemy.orm import DeclarativeBase, Mapped, backref, declared_attr, relationship +from sqlalchemy.orm import ( + DeclarativeBase, + Mapped, + backref, + declared_attr, + mapped_column, + relationship, +) from sqlalchemy.types import CHAR, TypeDecorator from tests.common import is_postgres_tests, sqla_uri @@ -20,15 +34,13 @@ def __tablename__(cls): class AutoIdMixin: - @declared_attr - def id(cls): - return Column(Integer, primary_key=True, autoincrement=True) + id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) class User(AutoIdMixin, Base): - name: str = Column(String, nullable=False, unique=True) - age: int = Column(Integer, nullable=True) - email: Optional[str] = Column(String, nullable=True) + name: Mapped[str] = mapped_column(unique=True) + age: Mapped[int | None] + email: Mapped[str | None] posts = relationship( "Post", @@ -61,7 +73,7 @@ class User(AutoIdMixin, Base): uselist=False, ) if TYPE_CHECKING: - computers: list["Computer"] + computers: list[Computer] def __repr__(self): return f"{self.__class__.__name__}(id={self.id}, name={self.name!r})" @@ -245,20 +257,20 @@ def __init__(self, *args, as_uuid=True, **kwargs): super().__init__(*args, **kwargs) self.as_uuid = as_uuid - def load_dialect_impl(self, dialect): + def load_dialect_impl(self, dialect): # noqa: ARG002 return CHAR(32) - def process_bind_param(self, value, dialect): + def process_bind_param(self, value, dialect): # noqa: ARG002 if value is None: return value if not isinstance(value, UUID): msg = f"Incorrect type got {type(value).__name__}, expected {UUID.__name__}" - raise Exception(msg) + raise TypeError(msg) return str(value) - def process_result_value(self, value, dialect): + def process_result_value(self, value, dialect): # noqa: ARG002 return value 
and UUID(value) @property @@ -270,7 +282,7 @@ def python_type(self): if is_postgres_tests(): # from sqlalchemy.dialects.postgresql.asyncpg import AsyncpgUUID as UUIDType # noinspection PyPep8Naming - from sqlalchemy.dialects.postgresql import UUID as UUIDType + from sqlalchemy.dialects.postgresql import UUID as UUIDType # noqa: N811 elif "sqlite" in db_uri: UUIDType = CustomUUIDType else: @@ -308,12 +320,15 @@ class SelfRelationship(Base): ) if TYPE_CHECKING: - parent_object: Optional["SelfRelationship"] + parent_object: SelfRelationship -class ContainsTimestamp(Base): - id = Column(Integer, primary_key=True) - timestamp = Column(DateTime(True), nullable=False) +class ContainsTimestamp(AutoIdMixin, Base): + type_annotation_map: ClassVar[dict[Any, Any]] = { + datetime: DateTime(timezone=True), + } + + timestamp: Mapped[datetime] class Alpha(Base): @@ -328,7 +343,10 @@ class Alpha(Base): ) beta = relationship("Beta", back_populates="alphas") gamma_id = Column(Integer, ForeignKey("gamma.id"), nullable=False) - gamma: Mapped["Gamma"] = relationship("Gamma") + gamma: Mapped[Gamma] = relationship( + "Gamma", + back_populates="alpha", + ) class BetaGammaBinding(Base): @@ -343,14 +361,14 @@ class Beta(Base): __tablename__ = "beta" id = Column(Integer, primary_key=True, autoincrement=True) - gammas: Mapped[List["Gamma"]] = relationship( + gammas: Mapped[list[Gamma]] = relationship( "Gamma", secondary="beta_gamma_binding", back_populates="betas", lazy="noload", ) alphas = relationship("Alpha") - deltas: Mapped[List["Delta"]] = relationship( + deltas: Mapped[list[Delta]] = relationship( "Delta", secondary="beta_delta_binding", lazy="noload", @@ -361,7 +379,7 @@ class Gamma(Base): __tablename__ = "gamma" id = Column(Integer, primary_key=True, autoincrement=True) - betas: Mapped[List["Beta"]] = relationship( + betas: Mapped[list[Beta]] = relationship( "Beta", secondary="beta_gamma_binding", back_populates="gammas", @@ -373,8 +391,11 @@ class Gamma(Base): nullable=False, 
index=True, ) - alpha = relationship("Alpha") - delta: Mapped["Delta"] = relationship("Delta") + alpha: Mapped[Alpha] = relationship( + "Alpha", + back_populates="gamma", + ) + delta: Mapped[Delta] = relationship("Delta") class BetaDeltaBinding(Base): @@ -390,8 +411,8 @@ class Delta(Base): id = Column(Integer, primary_key=True, autoincrement=True) name = Column(String) - gammas: Mapped[List["Gamma"]] = relationship("Gamma", back_populates="delta", lazy="noload") - betas: Mapped[List["Beta"]] = relationship( + gammas: Mapped[list[Gamma]] = relationship("Gamma", back_populates="delta", lazy="noload") + betas: Mapped[list[Beta]] = relationship( "Beta", secondary="beta_delta_binding", back_populates="deltas", @@ -418,4 +439,4 @@ class CascadeCase(Base): ) if TYPE_CHECKING: - parent_item: Mapped[Optional["CascadeCase"]] + parent_item: Mapped[CascadeCase] diff --git a/tests/pytest.ini b/tests/pytest.ini deleted file mode 100644 index df291533..00000000 --- a/tests/pytest.ini +++ /dev/null @@ -1,4 +0,0 @@ -[pytest] -filterwarnings = - ignore::DeprecationWarning - ignore::PendingDeprecationWarning diff --git a/tests/schemas.py b/tests/schemas.py deleted file mode 100644 index 94e5091c..00000000 --- a/tests/schemas.py +++ /dev/null @@ -1,515 +0,0 @@ -from typing import List, Optional -from uuid import UUID - -from pydantic import ConfigDict, field_validator - -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo - - -class UserAttributesBaseSchema(BaseModel): - name: str - age: Optional[int] = None - email: Optional[str] = None - model_config = ConfigDict(from_attributes=True) - - -class UserBaseSchema(UserAttributesBaseSchema): - """User base schema.""" - - posts: Optional[List["PostSchema"]] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="post", - many=True, - ), - }, - ) - - bio: Optional["UserBioSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user_bio", - ), - }, - ) - 
- computers: Optional[List["ComputerSchema"]] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="computer", - many=True, - ), - }, - ) - workplace: Optional["WorkplaceSchema"] = Field( - json_schema_extra={"relationship": RelationshipInfo(resource_type="workplace")}, - ) - - -class UserPatchSchema(UserBaseSchema): - """User PATCH schema.""" - - -class UserInSchema(UserBaseSchema): - """User input schema.""" - - -class UserInSchemaAllowIdOnPost(UserBaseSchema): - id: str = Field(json_schema_extra={"client_can_set_id": True}) - - -class UserSchema(UserInSchema): - """User item schema.""" - - model_config = ConfigDict(from_attributes=True) - - id: int - - -# User Bio Schemas ⬇️ - - -class UserBioAttributesBaseSchema(BaseModel): - """UserBio base schema.""" - - model_config = ConfigDict(from_attributes=True) - - birth_city: str - favourite_movies: str - # keys_to_ids_list: Optional[Dict[str, List[int]]] = None - - -class UserBioSchema(UserBioAttributesBaseSchema): - """UserBio item schema.""" - - id: int - user: "UserSchema" = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user", - ), - }, - ) - - -# Post Schemas ⬇️ - - -class PostAttributesBaseSchema(BaseModel): - title: str - body: str - model_config = ConfigDict(from_attributes=True) - - -class PostBaseSchema(PostAttributesBaseSchema): - """Post base schema.""" - - user: "UserSchema" = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user", - ), - }, - ) - comments: Optional[List["PostCommentSchema"]] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="post_comment", - many=True, - ), - }, - ) - - -class PostPatchSchema(PostBaseSchema): - """Post PATCH schema.""" - - -class PostInSchema(PostBaseSchema): - """Post input schema.""" - - -class PostSchema(PostInSchema): - """Post item schema.""" - - id: int - - -# Post Comment Schemas ⬇️ - - -class 
PostCommentAttributesBaseSchema(BaseModel): - text: str - model_config = ConfigDict(from_attributes=True) - - -class PostCommentBaseSchema(PostCommentAttributesBaseSchema): - """PostComment base schema.""" - - post: "PostSchema" = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="post", - ), - }, - ) - author: "UserSchema" = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user", - ), - }, - ) - - -class PostCommentSchema(PostCommentBaseSchema): - """PostComment item schema.""" - - id: int - - -# Parents and Children associations ⬇️⬇️ - - -# Association Schemas ⬇️ - - -class ParentToChildAssociationAttributesSchema(BaseModel): - extra_data: str - model_config = ConfigDict(from_attributes=True) - - -class ParentToChildAssociationSchema(ParentToChildAssociationAttributesSchema): - parent: "ParentSchema" = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="parent", - ), - }, - ) - - child: "ChildSchema" = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="child", - ), - }, - ) - - -# Parent Schemas ⬇️ - - -class ParentAttributesSchema(BaseModel): - name: str - model_config = ConfigDict(from_attributes=True) - - -class ParentBaseSchema(ParentAttributesSchema): - """Parent base schema.""" - - children: List["ParentToChildAssociationSchema"] = Field( - default=None, - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="parent_child_association", - many=True, - ), - }, - ) - - -class ParentPatchSchema(ParentBaseSchema): - """Parent PATCH schema.""" - - -class ParentInSchema(ParentBaseSchema): - """Parent input schema.""" - - -class ParentSchema(ParentInSchema): - """Parent item schema.""" - - id: int - - -# Child Schemas ⬇️ - - -class ChildAttributesSchema(BaseModel): - name: str - model_config = ConfigDict(from_attributes=True) - - -class ChildBaseSchema(ChildAttributesSchema): - """Child base schema.""" - - parents: 
List["ParentToChildAssociationSchema"] = Field( - default=None, - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="parent_child_association", - many=True, - ), - }, - ) - - -class ChildPatchSchema(ChildBaseSchema): - """Child PATCH schema.""" - - -class ChildInSchema(ChildBaseSchema): - """Child input schema.""" - - -class ChildSchema(ChildInSchema): - """Child item schema.""" - - id: int - - -class ComputerAttributesBaseSchema(BaseModel): - model_config = ConfigDict(from_attributes=True) - - name: str - - -class ComputerBaseSchema(ComputerAttributesBaseSchema): - """Computer base schema.""" - - user: Optional["UserSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user", - ), - }, - ) - - -class ComputerPatchSchema(ComputerBaseSchema): - """Computer PATCH schema.""" - - -class ComputerInSchema(ComputerBaseSchema): - """Computer input schema.""" - - -class ComputerSchema(ComputerInSchema): - """Computer item schema.""" - - model_config = ConfigDict(from_attributes=True) - - id: int - - # TODO: rename - # owner: Optional["UserSchema"] = Field( - user: Optional["UserSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user", - ), - }, - ) - - -class WorkplaceBaseSchema(BaseModel): - """Workplace base schema.""" - - model_config = ConfigDict(from_attributes=True) - - name: str - user: Optional["UserSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="user", - ), - }, - ) - - -class WorkplacePatchSchema(ComputerBaseSchema): - """Workplace PATCH schema.""" - - -class WorkplaceInSchema(ComputerBaseSchema): - """Workplace input schema.""" - - -class WorkplaceSchema(ComputerInSchema): - """Workplace item schema.""" - - model_config = ConfigDict(from_attributes=True) - - id: int - - -# task -class TaskBaseSchema(BaseModel): - model_config = ConfigDict(from_attributes=True) - - task_ids: Optional[list[str]] = None - - # 
noinspection PyMethodParameters - @field_validator("task_ids", mode="before", check_fields=False) - @classmethod - def task_ids_validator(cls, value: Optional[list[str]]): - """ - return `[]`, if value is None both on get and on create - """ - return value or [] - - -class TaskPatchSchema(TaskBaseSchema): - """Task PATCH schema.""" - - -class TaskInSchema(TaskBaseSchema): - """Task create schema.""" - - -class TaskSchema(TaskBaseSchema): - """Task item schema.""" - - id: int - - -# uuid below - - -class CustomUUIDItemAttributesSchema(BaseModel): - extra_id: Optional[UUID] = None - model_config = ConfigDict(from_attributes=True) - - -class CustomUUIDItemSchema(CustomUUIDItemAttributesSchema): - id: UUID = Field(json_schema_extra={"client_can_set_id": True}) - - -class SelfRelationshipAttributesSchema(BaseModel): - name: str - self_relationship: Optional["SelfRelationshipAttributesSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="self_relationship", - ), - }, - ) - children_objects: Optional["SelfRelationshipAttributesSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="self_relationship", - many=True, - ), - }, - ) - - -class CascadeCaseSchema(BaseModel): - parent_item: Optional["CascadeCaseSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="cascade_case", - ), - }, - ) - sub_items: Optional[list["CascadeCaseSchema"]] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="cascade_case", - many=True, - ), - }, - ) - - -class CustomUserAttributesSchema(UserBaseSchema): - spam: str - eggs: str - - -class AlphaSchema(BaseModel): - beta: Optional["BetaSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="beta", - ), - }, - ) - gamma: Optional["BetaSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="gamma", - ), - }, - ) - - -class 
BetaSchema(BaseModel): - alphas: Optional["AlphaSchema"] = Field( - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="alpha", - ), - }, - ) - gammas: Optional["GammaSchema"] = Field( - default=None, - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="gamma", - many=True, - ), - }, - ) - deltas: Optional["DeltaSchema"] = Field( - default=None, - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="delta", - many=True, - ), - }, - ) - - -class GammaSchema(BaseModel): - betas: Optional["BetaSchema"] = Field( - default=None, - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="beta", - many=True, - ), - }, - ) - delta: Optional["DeltaSchema"] = Field( - default=None, - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="Delta", - ), - }, - ) - - -class DeltaSchema(BaseModel): - name: str - gammas: Optional["GammaSchema"] = Field( - default=None, - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="gamma", - many=True, - ), - }, - ) - betas: Optional["BetaSchema"] = Field( - default=None, - json_schema_extra={ - "relationship": RelationshipInfo( - resource_type="beta", - many=True, - ), - }, - ) diff --git a/tests/schemas/__init__.py b/tests/schemas/__init__.py new file mode 100644 index 00000000..652a03e0 --- /dev/null +++ b/tests/schemas/__init__.py @@ -0,0 +1,103 @@ +__all__ = ( + "AlphaSchema", + "BetaSchema", + "CascadeCaseSchema", + "ChildAttributesSchema", + "ChildInSchema", + "ChildPatchSchema", + "ChildSchema", + "ComputerAttributesBaseSchema", + "ComputerInSchema", + "ComputerPatchSchema", + "ComputerSchema", + "CustomUUIDItemAttributesSchema", + "CustomUUIDItemSchema", + "DeltaSchema", + "GammaSchema", + "ParentAttributesSchema", + "ParentPatchSchema", + "ParentSchema", + "ParentToChildAssociationAttributesSchema", + "ParentToChildAssociationSchema", + "PostAttributesBaseSchema", + "PostInSchema", + "PostPatchSchema", + 
"PostSchema", + "PostCommentAttributesBaseSchema", + "PostCommentSchema", + "SelfRelationshipAttributesSchema", + "TaskBaseSchema", + "TaskInSchema", + "TaskPatchSchema", + "TaskSchema", + "CustomUserAttributesSchema", + "UserAttributesBaseSchema", + "UserInSchema", + "UserInSchemaAllowIdOnPost", + "UserPatchSchema", + "UserSchema", + "UserBioAttributesBaseSchema", + "UserBioSchema", + "WorkplaceSchema", +) + +from .alpha import AlphaSchema +from .beta import BetaSchema +from .cascade_case import CascadeCaseSchema +from .child import ( + ChildAttributesSchema, + ChildInSchema, + ChildPatchSchema, + ChildSchema, +) +from .computer import ( + ComputerAttributesBaseSchema, + ComputerInSchema, + ComputerPatchSchema, + ComputerSchema, +) +from .custom_uuid import ( + CustomUUIDItemAttributesSchema, + CustomUUIDItemSchema, +) +from .delta import DeltaSchema +from .gamma import GammaSchema +from .parent import ( + ParentAttributesSchema, + ParentPatchSchema, + ParentSchema, +) +from .parent_to_child import ( + ParentToChildAssociationAttributesSchema, + ParentToChildAssociationSchema, +) +from .post import ( + PostAttributesBaseSchema, + PostInSchema, + PostPatchSchema, + PostSchema, +) +from .post_comment import ( + PostCommentAttributesBaseSchema, + PostCommentSchema, +) +from .self_relationship import SelfRelationshipAttributesSchema +from .task import ( + TaskBaseSchema, + TaskInSchema, + TaskPatchSchema, + TaskSchema, +) +from .user import ( + CustomUserAttributesSchema, + UserAttributesBaseSchema, + UserInSchema, + UserInSchemaAllowIdOnPost, + UserPatchSchema, + UserSchema, +) +from .user_bio import ( + UserBioAttributesBaseSchema, + UserBioSchema, +) +from .workplace import WorkplaceSchema diff --git a/tests/schemas/alpha.py b/tests/schemas/alpha.py new file mode 100644 index 00000000..48734588 --- /dev/null +++ b/tests/schemas/alpha.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from 
fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import BetaSchema, GammaSchema + + +class AlphaSchema(BaseModel): + beta: Annotated[ + BetaSchema | None, + RelationshipInfo( + resource_type="beta", + ), + ] = None + gamma: Annotated[ + GammaSchema | None, + RelationshipInfo( + resource_type="gamma", + ), + ] = None diff --git a/tests/schemas/beta.py b/tests/schemas/beta.py new file mode 100644 index 00000000..34d2a5e2 --- /dev/null +++ b/tests/schemas/beta.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import AlphaSchema, DeltaSchema, GammaSchema + + +class BetaSchema(BaseModel): + alphas: Annotated[ + AlphaSchema | None, + RelationshipInfo( + resource_type="alpha", + ), + ] = None + gammas: Annotated[ + GammaSchema | None, + RelationshipInfo( + resource_type="gamma", + many=True, + ), + ] = None + deltas: Annotated[ + DeltaSchema | None, + RelationshipInfo( + resource_type="delta", + many=True, + ), + ] = None diff --git a/tests/schemas/cascade_case.py b/tests/schemas/cascade_case.py new file mode 100644 index 00000000..1f8fa890 --- /dev/null +++ b/tests/schemas/cascade_case.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import Annotated + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + + +class CascadeCaseSchema(BaseModel): + parent_item: Annotated[ + CascadeCaseSchema | None, + RelationshipInfo( + resource_type="cascade_case", + ), + ] = None + sub_items: Annotated[ + list[CascadeCaseSchema] | None, + RelationshipInfo( + resource_type="cascade_case", + many=True, + ), + ] = None diff --git a/tests/schemas/child.py 
b/tests/schemas/child.py new file mode 100644 index 00000000..8ef53cf3 --- /dev/null +++ b/tests/schemas/child.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import ParentToChildAssociationSchema + + +class ChildAttributesSchema(BaseModel): + name: str + model_config = ConfigDict(from_attributes=True) + + +class ChildBaseSchema(ChildAttributesSchema): + """Child base schema.""" + + parents: Annotated[ + list[ParentToChildAssociationSchema] | None, + RelationshipInfo( + resource_type="parent_child_association", + many=True, + ), + ] = None + + +class ChildPatchSchema(ChildBaseSchema): + """Child PATCH schema.""" + + +class ChildInSchema(ChildBaseSchema): + """Child input schema.""" + + +class ChildSchema(ChildInSchema): + """Child item schema.""" + + id: int diff --git a/tests/schemas/computer.py b/tests/schemas/computer.py new file mode 100644 index 00000000..9553c124 --- /dev/null +++ b/tests/schemas/computer.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import UserSchema + + +class ComputerAttributesBaseSchema(BaseModel): + model_config = ConfigDict(from_attributes=True) + + name: str + + +class ComputerBaseSchema(ComputerAttributesBaseSchema): + """Computer base schema.""" + + user: Annotated[ + UserSchema | None, + RelationshipInfo( + resource_type="user", + ), + ] = None + + +class ComputerPatchSchema(ComputerBaseSchema): + """Computer PATCH schema.""" + + +class ComputerInSchema(ComputerBaseSchema): + """Computer input schema.""" + + 
+class ComputerSchema(ComputerInSchema): + """Computer item schema.""" + + model_config = ConfigDict(from_attributes=True) + + id: int + + # TODO: rename + # owner: UserSchema | None"] = Field( + user: Annotated[ + UserSchema | None, + RelationshipInfo( + resource_type="user", + ), + ] = None diff --git a/tests/schemas/custom_uuid.py b/tests/schemas/custom_uuid.py new file mode 100644 index 00000000..cacd18bb --- /dev/null +++ b/tests/schemas/custom_uuid.py @@ -0,0 +1,23 @@ +from __future__ import annotations + +from typing import ( + Annotated, + Optional, +) +from uuid import UUID + +from pydantic import ( + ConfigDict, +) + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import ClientCanSetId + + +class CustomUUIDItemAttributesSchema(BaseModel): + extra_id: UUID | None = None + model_config = ConfigDict(from_attributes=True) + + +class CustomUUIDItemSchema(CustomUUIDItemAttributesSchema): + id: Annotated[UUID, ClientCanSetId()] diff --git a/tests/schemas/delta.py b/tests/schemas/delta.py new file mode 100644 index 00000000..244dfcde --- /dev/null +++ b/tests/schemas/delta.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import BetaSchema, GammaSchema + + +class DeltaSchema(BaseModel): + name: str + gammas: Annotated[ + GammaSchema | None, + RelationshipInfo( + resource_type="gamma", + many=True, + ), + ] = None + betas: Annotated[ + BetaSchema | None, + RelationshipInfo( + resource_type="beta", + many=True, + ), + ] = None diff --git a/tests/schemas/gamma.py b/tests/schemas/gamma.py new file mode 100644 index 00000000..1f032fdd --- /dev/null +++ b/tests/schemas/gamma.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from 
fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import BetaSchema, DeltaSchema + + +class GammaSchema(BaseModel): + betas: Annotated[ + list[BetaSchema] | None, + RelationshipInfo( + resource_type="beta", + many=True, + ), + ] = None + delta: Annotated[ + DeltaSchema | None, + RelationshipInfo( + resource_type="delta", + ), + ] = None diff --git a/tests/schemas/parent.py b/tests/schemas/parent.py new file mode 100644 index 00000000..f20ae5dc --- /dev/null +++ b/tests/schemas/parent.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import ParentToChildAssociationSchema + + +class ParentAttributesSchema(BaseModel): + name: str + model_config = ConfigDict(from_attributes=True) + + +class ParentBaseSchema(ParentAttributesSchema): + """Parent base schema.""" + + children: Annotated[ + list[ParentToChildAssociationSchema] | None, + RelationshipInfo( + resource_type="parent_child_association", + many=True, + ), + ] = None + + +class ParentPatchSchema(ParentBaseSchema): + """Parent PATCH schema.""" + + +class ParentInSchema(ParentBaseSchema): + """Parent input schema.""" + + +class ParentSchema(ParentInSchema): + """Parent item schema.""" + + id: int diff --git a/tests/schemas/parent_to_child.py b/tests/schemas/parent_to_child.py new file mode 100644 index 00000000..3c939569 --- /dev/null +++ b/tests/schemas/parent_to_child.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from
tests.schemas import ChildSchema, ParentSchema + + +class ParentToChildAssociationAttributesSchema(BaseModel): + extra_data: str + model_config = ConfigDict(from_attributes=True) + + +class ParentToChildAssociationSchema(ParentToChildAssociationAttributesSchema): + parent: Annotated[ + ParentSchema | None, + RelationshipInfo( + resource_type="parent", + ), + ] = None + + child: Annotated[ + ChildSchema | None, + RelationshipInfo( + resource_type="child", + ), + ] = None diff --git a/tests/schemas/post.py b/tests/schemas/post.py new file mode 100644 index 00000000..e41aeefd --- /dev/null +++ b/tests/schemas/post.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import PostCommentSchema, UserSchema + + +class PostAttributesBaseSchema(BaseModel): + title: str + body: str + model_config = ConfigDict(from_attributes=True) + + +class PostBaseSchema(PostAttributesBaseSchema): + """Post base schema.""" + + user: Annotated[ + UserSchema | None, + RelationshipInfo( + resource_type="user", + ), + ] = None + comments: Annotated[ + list[PostCommentSchema] | None, + RelationshipInfo( + resource_type="post_comment", + many=True, + ), + ] = None + + +class PostPatchSchema(PostBaseSchema): + """Post PATCH schema.""" + + +class PostInSchema(PostBaseSchema): + """Post input schema.""" + + +class PostSchema(PostInSchema): + """Post item schema.""" + + id: int diff --git a/tests/schemas/post_comment.py b/tests/schemas/post_comment.py new file mode 100644 index 00000000..5ba3d340 --- /dev/null +++ b/tests/schemas/post_comment.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import ( + BaseModel, 
+) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import PostSchema, UserSchema + + +class PostCommentAttributesBaseSchema(BaseModel): + text: str + model_config = ConfigDict(from_attributes=True) + + +class PostCommentBaseSchema(PostCommentAttributesBaseSchema): + """PostComment base schema.""" + + post: Annotated[ + # PostSchema | None, + PostSchema, + RelationshipInfo( + resource_type="post", + ), + ] + author: Annotated[ + # UserSchema | None, + UserSchema, + RelationshipInfo( + resource_type="user", + ), + ] + + +class PostCommentSchema(PostCommentBaseSchema): + """PostComment item schema.""" + + id: int diff --git a/tests/schemas/self_relationship.py b/tests/schemas/self_relationship.py new file mode 100644 index 00000000..3798ee03 --- /dev/null +++ b/tests/schemas/self_relationship.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from typing import Annotated + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + + +class SelfRelationshipAttributesSchema(BaseModel): + name: str + parent_object: Annotated[ + SelfRelationshipAttributesSchema | None, + RelationshipInfo( + resource_type="self_relationship", + ), + ] = None + children_objects: Annotated[ + list[SelfRelationshipAttributesSchema] | None, + RelationshipInfo( + resource_type="self_relationship", + many=True, + ), + ] = None diff --git a/tests/schemas/task.py b/tests/schemas/task.py new file mode 100644 index 00000000..663eab58 --- /dev/null +++ b/tests/schemas/task.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from typing import Annotated + +from pydantic import ( + BeforeValidator, + ConfigDict, + field_validator, +) + +from fastapi_jsonapi.schema_base import BaseModel + + +def func_validator(value: list[str] | None) -> list[str]: + """ + return `[]`, if value is None both on get and on create + + :param value: + :return: + """ + return value or [] + + 
+class TaskBaseSchema(BaseModel): + model_config = ConfigDict(from_attributes=True) + + # TODO: check BeforeValidator annotated + task_ids: list[str] | None = None + another_task_ids: Annotated[list[str] | None, BeforeValidator(func_validator)] + + # noinspection PyMethodParameters + @field_validator("task_ids", mode="before") + @staticmethod + def task_ids_validator(value: list[str] | None): + """ + return `[]`, if value is None both on get and on create + """ + return func_validator(value) + + +class TaskPatchSchema(TaskBaseSchema): + """Task PATCH schema.""" + + +class TaskInSchema(TaskBaseSchema): + """Task create schema.""" + + +class TaskSchema(TaskBaseSchema): + """Task item schema.""" + + id: int diff --git a/tests/schemas/user.py b/tests/schemas/user.py new file mode 100644 index 00000000..8f031463 --- /dev/null +++ b/tests/schemas/user.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import ClientCanSetId, RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import ( + ComputerSchema, + PostSchema, + UserBioSchema, + WorkplaceSchema, + ) + + +class UserAttributesBaseSchema(BaseModel): + model_config = ConfigDict(from_attributes=True) + + name: str + age: int | None = None + email: str | None = None + + +class UserBaseSchema(UserAttributesBaseSchema): + """User base schema.""" + + posts: Annotated[ + list[PostSchema] | None, + RelationshipInfo( + resource_type="post", + many=True, + ), + ] = None + + bio: Annotated[ + UserBioSchema | None, + RelationshipInfo( + resource_type="user_bio", + ), + ] = None + + computers: Annotated[ + list[ComputerSchema] | None, + RelationshipInfo( + resource_type="computer", + many=True, + ), + ] = None + + workplace: Annotated[ + WorkplaceSchema | None, + RelationshipInfo( + resource_type="workplace", + ), + ] = None + + +class 
UserPatchSchema(UserBaseSchema): + """User PATCH schema.""" + + +class UserInSchema(UserBaseSchema): + """User input schema.""" + + +class UserInSchemaAllowIdOnPost(UserBaseSchema): + # TODO: handle non-instance + id: Annotated[str, ClientCanSetId()] + + +class UserSchema(UserBaseSchema): + """User item schema.""" + + id: int + + +class CustomUserAttributesSchema(UserBaseSchema): + spam: str + eggs: str diff --git a/tests/schemas/user_bio.py b/tests/schemas/user_bio.py new file mode 100644 index 00000000..2c56e404 --- /dev/null +++ b/tests/schemas/user_bio.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import UserSchema + + +class UserBioAttributesBaseSchema(BaseModel): + """UserBio base schema.""" + + model_config = ConfigDict(from_attributes=True) + + birth_city: str + favourite_movies: str + # TODO: + # keys_to_ids_list: Optional[dict[str, list[int]]] = None + + +class UserBioSchema(UserBioAttributesBaseSchema): + """UserBio item schema.""" + + id: int + user: Annotated[ + UserSchema | None, + RelationshipInfo( + resource_type="user", + ), + ] = None diff --git a/tests/schemas/workplace.py b/tests/schemas/workplace.py new file mode 100644 index 00000000..86c1854e --- /dev/null +++ b/tests/schemas/workplace.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Annotated, +) + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import ( + BaseModel, +) +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from tests.schemas import UserSchema + + +class WorkplaceBaseSchema(BaseModel): + """Workplace base schema.""" + + model_config = ConfigDict(from_attributes=True) + + name: str + user: Annotated[ + UserSchema | 
None, + RelationshipInfo( + resource_type="user", + ), + ] = None + + +class WorkplacePatchSchema(WorkplaceBaseSchema): + """Workplace PATCH schema.""" + + +class WorkplaceInSchema(WorkplaceBaseSchema): + """Workplace input schema.""" + + +class WorkplaceSchema(WorkplaceInSchema): + """Workplace item schema.""" + + model_config = ConfigDict(from_attributes=True) + + id: int diff --git a/tests/test_api/test_api_sqla_with_includes.py b/tests/test_api/test_api_sqla_with_includes.py index aec5e195..047a5ef1 100644 --- a/tests/test_api/test_api_sqla_with_includes.py +++ b/tests/test_api/test_api_sqla_with_includes.py @@ -5,21 +5,23 @@ from datetime import datetime, timezone from itertools import chain, zip_longest from json import dumps, loads -from typing import Dict, List, Literal, Set, Tuple +from typing import ( + Annotated, + Literal, +) from uuid import UUID, uuid4 import pytest from fastapi import FastAPI, status from httpx import AsyncClient -from pydantic import BaseModel, Field -from pydantic.fields import FieldInfo -from pytest import fixture, mark, param, raises # noqa PT013 -from sqlalchemy import func, select +from pydantic import BaseModel +from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.orm import InstrumentedAttribute from starlette.datastructures import QueryParams from fastapi_jsonapi.api import RoutersJSONAPI +from fastapi_jsonapi.contrib.sqla.filters import sql_filter_lower_equals +from fastapi_jsonapi.types_metadata import ClientCanSetId from fastapi_jsonapi.views.view_base import ViewBase from tests.common import is_postgres_tests from tests.fixtures.app import build_alphabet_app, build_app_custom @@ -60,8 +62,6 @@ UserSchema, ) -pytestmark = mark.asyncio - logging.basicConfig(level=logging.DEBUG) @@ -74,6 +74,7 @@ async def test_root(client: AsyncClient): assert response.status_code == status.HTTP_200_OK +@pytest.mark.usefixtures("refresh_db") async def test_get_users(app: FastAPI, client: 
AsyncClient, user_1: User, user_2: User): url = app.url_path_for("get_user_list") response = await client.get(url) @@ -149,28 +150,26 @@ async def test_get_users_paginated( assert response.status_code == status.HTTP_200_OK, response.text response_data = response.json() - assert response_data == { - "data": [ - { - "attributes": UserAttributesBaseSchema.model_validate(user), - "id": str(user.id), - "type": "user", - }, - ], - "jsonapi": {"version": "1.0"}, - "meta": {"count": 2, "totalPages": 2}, - } + expected_data = [ + { + "attributes": UserAttributesBaseSchema.model_validate(user).model_dump(), + "id": ViewBase.get_db_item_id(user), + "type": "user", + }, + ] + assert "data" in response_data + assert response_data["data"] == expected_data - @mark.parametrize( - "fields, expected_include", + @pytest.mark.parametrize( + ("fields", "expected_include"), [ - param( + pytest.param( [ ("fields[user]", "name,age"), ], {"name", "age"}, ), - param( + pytest.param( [ ("fields[user]", "name,age"), ("fields[user]", "email"), @@ -185,8 +184,8 @@ async def test_select_custom_fields( client: AsyncClient, user_1: User, user_2: User, - fields: List[Tuple[str, str]], - expected_include: Set[str], + fields: list[tuple[str, str]], + expected_include: set[str], ): url = app.url_path_for("get_user_list") user_1, user_2 = sorted((user_1, user_2), key=lambda x: x.id) @@ -200,13 +199,13 @@ async def test_select_custom_fields( assert response_data == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict(include=expected_include), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(include=expected_include), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_2).dict(include=expected_include), - "id": str(user_2.id), + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump(include=expected_include), + "id": ViewBase.get_db_item_id(user_2), 
"type": "user", }, ], @@ -234,6 +233,7 @@ async def test_select_custom_fields_with_includes( queried_user_fields = "name" queried_post_fields = "title" + # noinspection PyTypeChecker params = QueryParams( [ ("fields[user]", queried_user_fields), @@ -253,37 +253,37 @@ async def test_select_custom_fields_with_includes( assert response_data == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict( + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump( include=set(queried_user_fields.split(",")), ), "relationships": { "posts": { "data": [ { - "id": str(user_1_post.id), + "id": ViewBase.get_db_item_id(user_1_post), "type": "post", }, ], }, }, - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_2).dict( + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump( include=set(queried_user_fields.split(",")), ), "relationships": { "posts": { "data": [ { - "id": str(user_2_post.id), + "id": ViewBase.get_db_item_id(user_2_post), "type": "post", }, ], }, }, - "id": str(user_2.id), + "id": ViewBase.get_db_item_id(user_2), "type": "user", }, ], @@ -292,15 +292,15 @@ async def test_select_custom_fields_with_includes( "included": sorted( [ { - "attributes": PostAttributesBaseSchema.from_orm(user_2_post).dict( + "attributes": PostAttributesBaseSchema.model_validate(user_2_post).model_dump( include=set(queried_post_fields.split(",")), ), - "id": str(user_2_post.id), + "id": ViewBase.get_db_item_id(user_2_post), "relationships": { "comments": { "data": [ { - "id": str(user_1_comment.id), + "id": ViewBase.get_db_item_id(user_1_comment), "type": "post_comment", }, ], @@ -309,23 +309,25 @@ async def test_select_custom_fields_with_includes( "type": "post", }, { - "attributes": PostAttributesBaseSchema.from_orm(user_1_post).dict( + "attributes": PostAttributesBaseSchema.model_validate(user_1_post).model_dump( 
include=set(queried_post_fields.split(",")), ), - "id": str(user_1_post.id), + "id": ViewBase.get_db_item_id(user_1_post), "relationships": { - "comments": {"data": [{"id": str(user_2_comment.id), "type": "post_comment"}]}, + "comments": { + "data": [{"id": ViewBase.get_db_item_id(user_2_comment), "type": "post_comment"}], + }, }, "type": "post", }, { "attributes": {}, - "id": str(user_1_comment.id), + "id": ViewBase.get_db_item_id(user_1_comment), "type": "post_comment", }, { "attributes": {}, - "id": str(user_2_comment.id), + "id": ViewBase.get_db_item_id(user_2_comment), "type": "post_comment", }, ], @@ -333,30 +335,38 @@ async def test_select_custom_fields_with_includes( ), } + @pytest.mark.usefixtures( + "refresh_db", + ) async def test_select_custom_fields_for_includes_without_requesting_includes( self, app: FastAPI, client: AsyncClient, user_1: User, + user_2: User, ): url = app.url_path_for("get_user_list") + # noinspection PyTypeChecker params = QueryParams([("fields[post]", "title")]) response = await client.get(url, params=str(params)) assert response.status_code == status.HTTP_200_OK, response.text response_data = response.json() + users = [user_1, user_2] + assert response_data == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user).model_dump(), + "id": ViewBase.get_db_item_id(user), "type": "user", - }, + } + for user in users ], "jsonapi": {"version": "1.0"}, - "meta": {"count": 1, "totalPages": 1}, + "meta": {"count": len(users), "totalPages": 1}, } @@ -367,8 +377,8 @@ async def test_get_posts_with_users( client: AsyncClient, user_1: User, user_2: User, - user_1_posts: List[Post], - user_2_posts: List[Post], + user_1_posts: list[Post], + user_2_posts: list[Post], ): url = app.url_path_for("get_post_list") url = f"{url}?include=user" @@ -434,7 +444,7 @@ async def test_create_post_for_user( "user": { "data": { "type": "user", - "id": user_1.id, + 
"id": ViewBase.get_db_item_id(user_1), }, }, }, @@ -452,7 +462,7 @@ async def test_create_post_for_user( "user": { "data": { "type": "user", - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), }, }, }, @@ -460,7 +470,7 @@ async def test_create_post_for_user( included = response_data["included"] assert included == [ { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), }, @@ -486,13 +496,13 @@ async def test_create_comments_for_post( "post": { "data": { "type": "post", - "id": user_1_post.id, + "id": ViewBase.get_db_item_id(user_1_post), }, }, "author": { "data": { "type": "user", - "id": user_2.id, + "id": ViewBase.get_db_item_id(user_2), }, }, }, @@ -511,13 +521,13 @@ async def test_create_comments_for_post( "post": { "data": { "type": "post", - "id": str(user_1_post.id), + "id": ViewBase.get_db_item_id(user_1_post), }, }, "author": { "data": { "type": "user", - "id": str(user_2.id), + "id": ViewBase.get_db_item_id(user_2), }, }, }, @@ -526,12 +536,12 @@ async def test_create_comments_for_post( assert included == [ { "type": "post", - "id": str(user_1_post.id), + "id": ViewBase.get_db_item_id(user_1_post), "attributes": PostAttributesBaseSchema.model_validate(user_1_post).model_dump(), "relationships": { "user": { "data": { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, }, @@ -539,12 +549,12 @@ async def test_create_comments_for_post( }, { "type": "user", - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), }, { "type": "user", - "id": str(user_2.id), + "id": ViewBase.get_db_item_id(user_2), "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump(), }, ] @@ -563,22 +573,24 @@ async def test_create_comment_error_no_relationship( :param user_1_post: :return: """ + # TODO: assert schema attribute is required! 
url = app.url_path_for("get_post_comment_list") comment_attributes = PostCommentAttributesBaseSchema( text=fake.sentence(), ).model_dump() + relationships_data = { + "post": { + "data": { + "type": "post", + "id": ViewBase.get_db_item_id(user_1_post), + }, + }, + # don't pass "author" + } comment_create = { "data": { "attributes": comment_attributes, - "relationships": { - "post": { - "data": { - "type": "post", - "id": user_1_post.id, - }, - }, - # don't pass "author" - }, + "relationships": relationships_data, }, } response = await client.post(url, json=comment_create) @@ -593,8 +605,9 @@ async def test_create_comment_error_no_relationship( "relationships", "author", ], - "msg": "field required", - "type": "value_error.missing", + "input": relationships_data, + "msg": "Field required", + "type": "missing", }, ], } @@ -623,24 +636,16 @@ async def test_create_comment_error_no_relationships_content( assert response_data == { "detail": [ { - "loc": [ - "body", - "data", - "relationships", - "post", - ], - "msg": "field required", - "type": "value_error.missing", + "input": {}, + "loc": ["body", "data", "relationships", "post"], + "msg": "Field required", + "type": "missing", }, { - "loc": [ - "body", - "data", - "relationships", - "author", - ], - "msg": "field required", - "type": "value_error.missing", + "input": {}, + "loc": ["body", "data", "relationships", "author"], + "msg": "Field required", + "type": "missing", }, ], } @@ -654,11 +659,12 @@ async def test_create_comment_error_no_relationships_field( comment_attributes = PostCommentAttributesBaseSchema( text=fake.sentence(), ).model_dump() + create_data = { + "attributes": comment_attributes, + # don't pass "relationships" at all + } comment_create = { - "data": { - "attributes": comment_attributes, - # don't pass "relationships" at all - }, + "data": create_data, } response = await client.post(url, json=comment_create) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text @@ 
-666,13 +672,10 @@ async def test_create_comment_error_no_relationships_field( assert response_data == { "detail": [ { - "loc": [ - "body", - "data", - "relationships", - ], - "msg": "field required", - "type": "value_error.missing", + "input": create_data, + "loc": ["body", "data", "relationships"], + "msg": "Field required", + "type": "missing", }, ], } @@ -686,8 +689,8 @@ async def test_get_users_with_all_inner_relations( user_1_bio: UserBio, user_1_posts, user_1_post_for_comments: Post, - user_2_posts: List[Post], - user_1_comments_for_u2_posts: List[PostComment], + user_2_posts: list[Post], + user_1_comments_for_u2_posts: list[PostComment], user_2_comment_for_one_u1_post: PostComment, ): """ @@ -711,7 +714,7 @@ async def test_get_users_with_all_inner_relations( assert len(users_data) == len(users) assert "included" in response_data, response_data - included: List[Dict] = response_data["included"] + included: list[dict] = response_data["included"] included_data = {association_key(data): data for data in included} @@ -839,10 +842,10 @@ async def test_select_custom_fields( assert response.status_code == status.HTTP_200_OK assert response.json() == { "data": { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict( + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump( include=set(queried_user_fields.split(",")), ), - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, "jsonapi": {"version": "1.0"}, @@ -851,8 +854,12 @@ async def test_select_custom_fields( class TestUserWithPostsWithInnerIncludes: - @mark.parametrize( - "include, expected_relationships_inner_relations, expect_user_include", + @pytest.mark.parametrize( + ( + "include", + "expected_relationships_inner_relations", + "expect_user_include", + ), [ ( ["posts", "posts.user"], @@ -895,6 +902,7 @@ async def test_get_users_with_posts_and_inner_includes( Test if requesting `posts.user` and `posts.comments` returns posts with both `user` and 
`comments` """ + assert user_1_post_for_comments.id, "post required" assert user_1_posts assert user_2_comment_for_one_u1_post.author_id == user_2.id include_param = ",".join(include) @@ -909,14 +917,14 @@ async def test_get_users_with_posts_and_inner_includes( assert result_data == [ { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": resource_type, "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), "relationships": { "posts": { "data": [ # relationship info - {"id": str(p.id), "type": "post"} + {"id": ViewBase.get_db_item_id(p), "type": "post"} # for every post for p in user_1_posts ], @@ -958,7 +966,6 @@ async def test_get_users_with_posts_and_inner_includes( for key in set(expected_includes).difference(expected_relationships_inner_relations): expected_includes.pop(key) - # XXX if not expect_user_include: expected_includes.pop("user", None) assert included_as_map == expected_includes @@ -970,29 +977,31 @@ def prepare_expected_includes( user_1_posts: list[PostComment], user_2_comment_for_one_u1_post: PostComment, ): - expected_includes = { + return { "post": [ # { - "id": str(p.id), + "id": ViewBase.get_db_item_id(p), "type": "post", "attributes": PostAttributesBaseSchema.model_validate(p).model_dump(), "relationships": { "user": { "data": { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, }, "comments": { - "data": [ - { - "id": str(user_2_comment_for_one_u1_post.id), - "type": "post_comment", - }, - ] - if p.id == user_2_comment_for_one_u1_post.post_id - else [], + "data": ( + [ + { + "id": ViewBase.get_db_item_id(user_2_comment_for_one_u1_post), + "type": "post_comment", + }, + ] + if p.id == user_2_comment_for_one_u1_post.post_id + else [] + ), }, }, } @@ -1001,7 +1010,7 @@ def prepare_expected_includes( ], "post_comment": [ { - "id": str(user_2_comment_for_one_u1_post.id), + "id": ViewBase.get_db_item_id(user_2_comment_for_one_u1_post), "type": "post_comment", "attributes": 
PostCommentAttributesBaseSchema.model_validate( user_2_comment_for_one_u1_post, @@ -1009,7 +1018,7 @@ def prepare_expected_includes( "relationships": { "author": { "data": { - "id": str(user_2.id), + "id": ViewBase.get_db_item_id(user_2), "type": "user", }, }, @@ -1018,15 +1027,13 @@ def prepare_expected_includes( ], "user": [ { - "id": str(user_2.id), + "id": ViewBase.get_db_item_id(user_2), "type": "user", "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump(), }, ], } - return expected_includes - async def test_method_not_allowed(app: FastAPI, client: AsyncClient): url = app.url_path_for("get_user_list") @@ -1044,7 +1051,7 @@ async def test_get_list_view_generic(app: FastAPI, client: AsyncClient, user_1: assert len(users_data) == 1, users_data user_data = users_data[0] assert user_data["id"] == str(user_1.id) - assert user_data["attributes"] == UserAttributesBaseSchema.model_validate(user_1) + assert user_data["attributes"] == UserAttributesBaseSchema.model_validate(user_1).model_dump() async def test_get_user_not_found(app: FastAPI, client: AsyncClient): @@ -1064,6 +1071,7 @@ async def test_get_user_not_found(app: FastAPI, client: AsyncClient): } +@pytest.mark.usefixtures("refresh_db") class TestCreateObjects: async def test_create_object(self, app: FastAPI, client: AsyncClient): create_user_body = { @@ -1093,9 +1101,10 @@ async def test_create_object_with_relationship_and_fetch_include( "attributes": UserBioAttributesBaseSchema( birth_city=fake.word(), favourite_movies=fake.sentence(), + # TODO: # keys_to_ids_list={"foobar": [1, 2, 3], "spameggs": [2, 3, 4]}, ).model_dump(), - "relationships": {"user": {"data": {"type": "user", "id": user_1.id}}}, + "relationships": {"user": {"data": {"type": "user", "id": ViewBase.get_db_item_id(user_1)}}}, }, } url = app.url_path_for("get_user_bio_list") @@ -1112,7 +1121,7 @@ async def test_create_object_with_relationship_and_fetch_include( assert isinstance(included_user, dict), included_user assert 
included_user["type"] == "user" assert included_user["id"] == str(user_1.id) - assert included_user["attributes"] == UserAttributesBaseSchema.model_validate(user_1) + assert included_user["attributes"] == UserAttributesBaseSchema.model_validate(user_1).model_dump() async def test_create_object_with_to_many_relationship_and_fetch_include( self, @@ -1132,11 +1141,11 @@ async def test_create_object_with_to_many_relationship_and_fetch_include( "computers": { "data": [ { - "id": computer_1.id, + "id": ViewBase.get_db_item_id(computer_1), "type": "computer", }, { - "id": computer_2.id, + "id": ViewBase.get_db_item_id(computer_2), "type": "computer", }, ], @@ -1159,11 +1168,11 @@ async def test_create_object_with_to_many_relationship_and_fetch_include( "computers": { "data": [ { - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), "type": "computer", }, { - "id": str(computer_2.id), + "id": ViewBase.get_db_item_id(computer_2), "type": "computer", }, ], @@ -1174,12 +1183,12 @@ async def test_create_object_with_to_many_relationship_and_fetch_include( "included": [ { "attributes": {"name": computer_1.name}, - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), "type": "computer", }, { "attributes": {"name": computer_2.name}, - "id": str(computer_2.id), + "id": ViewBase.get_db_item_id(computer_2), "type": "computer", }, ], @@ -1206,18 +1215,18 @@ async def test_create_to_one_and_to_many_relationship_at_the_same_time( "computers": { "data": [ { - "id": computer_1.id, + "id": ViewBase.get_db_item_id(computer_1), "type": "computer", }, { - "id": computer_2.id, + "id": ViewBase.get_db_item_id(computer_2), "type": "computer", }, ], }, "workplace": { "data": { - "id": str(workplace_1.id), + "id": ViewBase.get_db_item_id(workplace_1), "type": "workplace", }, }, @@ -1239,18 +1248,18 @@ async def test_create_to_one_and_to_many_relationship_at_the_same_time( "computers": { "data": [ { - "id": str(computer_1.id), + "id": 
ViewBase.get_db_item_id(computer_1), "type": "computer", }, { - "id": str(computer_2.id), + "id": ViewBase.get_db_item_id(computer_2), "type": "computer", }, ], }, "workplace": { "data": { - "id": str(workplace_1.id), + "id": ViewBase.get_db_item_id(workplace_1), "type": "workplace", }, }, @@ -1260,17 +1269,17 @@ async def test_create_to_one_and_to_many_relationship_at_the_same_time( "included": [ { "attributes": {"name": computer_1.name}, - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), "type": "computer", }, { "attributes": {"name": computer_2.name}, - "id": str(computer_2.id), + "id": ViewBase.get_db_item_id(computer_2), "type": "computer", }, { "attributes": {"name": workplace_1.name}, - "id": str(workplace_1.id), + "id": ViewBase.get_db_item_id(workplace_1), "type": "workplace", }, ], @@ -1331,7 +1340,7 @@ async def test_create_id_by_client(self): resource_type=resource_type, ) - new_id = str(fake.pyint(100, 999)) + new_id = str(fake.pyint(1000, 10_000)) attrs = UserAttributesBaseSchema( name=fake.name(), age=fake.pyint(), @@ -1420,7 +1429,8 @@ async def test_create_with_relationship_to_the_same_table(self): response_json = res.json() assert response_json["data"] - assert (parent_object_id := response_json["data"].get("id")) + parent_object_id = response_json["data"].get("id") + assert parent_object_id assert response_json == { "data": { "attributes": { @@ -1454,7 +1464,8 @@ async def test_create_with_relationship_to_the_same_table(self): response_json = res.json() assert response_json["data"] - assert (child_object_id := response_json["data"].get("id")) + child_object_id = response_json["data"].get("id") + assert child_object_id assert res.json() == { "data": { "attributes": {"name": "child"}, @@ -1509,13 +1520,23 @@ class ContainsTimestampAttrsSchema(BaseModel): assert res.status_code == status.HTTP_201_CREATED, res.text response_json = res.json() - assert (entity_id := response_json["data"]["id"]) + data = response_json["data"] + 
entity_id = data["id"] + assert entity_id + + received_attributes = data.pop("attributes") + assert ( + # rec + ContainsTimestampAttrsSchema(**received_attributes) + == + # ex + ContainsTimestampAttrsSchema(timestamp=create_timestamp) + ) assert response_json == { "meta": None, "jsonapi": {"version": "1.0"}, "data": { "type": resource_type, - "attributes": {"timestamp": create_timestamp.isoformat()}, "id": entity_id, }, } @@ -1528,6 +1549,7 @@ class ContainsTimestampAttrsSchema(BaseModel): if is_postgres_tests(): expected_response_timestamp = create_timestamp.replace().isoformat() + # ... ? params = { "filter": json.dumps( [ @@ -1583,7 +1605,7 @@ async def test_select_custom_fields(self, app: FastAPI, client: AsyncClient): ) create_user_body = { "data": { - "attributes": user_attrs_schema.dict(), + "attributes": user_attrs_schema.model_dump(), }, } queried_user_fields = "name" @@ -1597,7 +1619,7 @@ async def test_select_custom_fields(self, app: FastAPI, client: AsyncClient): assert response_data["data"].pop("id") assert response_data == { "data": { - "attributes": user_attrs_schema.dict(include=set(queried_user_fields.split(","))), + "attributes": user_attrs_schema.model_dump(include=set(queried_user_fields.split(","))), "type": "user", }, "jsonapi": {"version": "1.0"}, @@ -1620,7 +1642,7 @@ async def test_patch_object( patch_user_body = { "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "attributes": new_attrs, }, } @@ -1631,7 +1653,7 @@ async def test_patch_object( assert res.json() == { "data": { "attributes": new_attrs, - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, "jsonapi": {"version": "1.0"}, @@ -1662,7 +1684,7 @@ class UserPatchSchemaWithExtraAttribute(UserPatchSchema): patch_user_body = { "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "attributes": new_attrs, }, } @@ -1689,7 +1711,7 @@ async def test_update_schema_has_extra_fields(self, user_1: User, caplog): create_body = { 
"data": { "attributes": new_attributes.model_dump(), - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), }, } @@ -1701,7 +1723,7 @@ async def test_update_schema_has_extra_fields(self, user_1: User, caplog): assert res.json() == { "data": { "attributes": UserAttributesBaseSchema(**new_attributes.model_dump()).model_dump(), - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": resource_type, }, "jsonapi": {"version": "1.0"}, @@ -1730,8 +1752,8 @@ async def test_select_custom_fields( patch_user_body = { "data": { - "id": user_1.id, - "attributes": new_attrs.dict(), + "id": ViewBase.get_db_item_id(user_1), + "attributes": new_attrs.model_dump(), }, } queried_user_fields = "name" @@ -1742,15 +1764,15 @@ async def test_select_custom_fields( assert res.status_code == status.HTTP_200_OK, res.text assert res.json() == { "data": { - "attributes": new_attrs.dict(include=set(queried_user_fields.split(","))), - "id": str(user_1.id), + "attributes": new_attrs.model_dump(include=set(queried_user_fields.split(","))), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, "jsonapi": {"version": "1.0"}, "meta": None, } - @mark.parametrize("check_type", ["ok", "fail"]) + @pytest.mark.parametrize("check_type", ["ok", "fail"]) async def test_update_to_many_relationships(self, async_session: AsyncSession, check_type: Literal["ok", "fail"]): resource_type = "cascade_case" with suppress(KeyError): @@ -1787,18 +1809,18 @@ async def test_update_to_many_relationships(self, async_session: AsyncSession, c update_body = { "type": resource_type, "data": { - "id": new_top_item.id, + "id": ViewBase.get_db_item_id(new_top_item), "attributes": {}, "relationships": { "sub_items": { "data": [ { "type": resource_type, - "id": sub_item_1.id, + "id": ViewBase.get_db_item_id(sub_item_1), }, { "type": resource_type, - "id": sub_item_2.id, + "id": ViewBase.get_db_item_id(sub_item_2), }, ], }, @@ -1847,16 +1869,17 @@ async def 
test_ok_when_foreign_key_of_related_object_is_nullable( email=fake.email(), ).model_dump() + workplace_data = { + "type": "workplace", + "id": ViewBase.get_db_item_id(workplace_1), + } patch_user_body = { "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "attributes": new_attrs, "relationships": { "workplace": { - "data": { - "type": "workplace", - "id": workplace_1.id, - }, + "data": workplace_data, }, }, }, @@ -1871,12 +1894,12 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( assert res.json() == { "data": { "attributes": new_attrs, - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "relationships": { "workplace": { "data": { "type": "workplace", - "id": str(workplace_1.id), + "id": ViewBase.get_db_item_id(workplace_1), }, }, }, @@ -1885,7 +1908,7 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( "included": [ { "attributes": {"name": workplace_1.name}, - "id": str(workplace_1.id), + "id": ViewBase.get_db_item_id(workplace_1), "type": "workplace", }, ], @@ -1893,7 +1916,7 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( "meta": None, } - patch_user_body["data"]["relationships"]["workplace"]["data"]["id"] = workplace_2.id + workplace_data["id"] = ViewBase.get_db_item_id(workplace_2) # update relationship with patch endpoint res = await client.patch(url, json=patch_user_body) @@ -1902,12 +1925,12 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( assert res.json() == { "data": { "attributes": new_attrs, - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "relationships": { "workplace": { "data": { "type": "workplace", - "id": str(workplace_2.id), + "id": ViewBase.get_db_item_id(workplace_2), }, }, }, @@ -1916,7 +1939,7 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( "included": [ { "attributes": {"name": workplace_2.name}, - "id": str(workplace_2.id), + "id": ViewBase.get_db_item_id(workplace_2), "type": "workplace", }, ], 
@@ -1938,13 +1961,13 @@ async def test_fail_to_bind_relationship_with_constraint( patch_user_bio_body = { "data": { - "id": user_1_bio.id, + "id": ViewBase.get_db_item_id(user_1_bio), "attributes": UserBioAttributesBaseSchema.model_validate(user_1_bio).model_dump(), "relationships": { "user": { "data": { "type": "user", - "id": user_2.id, + "id": ViewBase.get_db_item_id(user_2), }, }, }, @@ -1963,13 +1986,14 @@ async def test_fail_to_bind_relationship_with_constraint( "status_code": status.HTTP_400_BAD_REQUEST, "title": "Bad Request", "meta": { - "id": str(user_1_bio.id), + "id": ViewBase.get_db_item_id(user_1_bio), "type": "user_bio", }, }, ], } + @pytest.mark.usefixtures("refresh_db") async def test_relationship_not_found( self, app: FastAPI, @@ -1985,7 +2009,7 @@ async def test_relationship_not_found( fake_relationship_id = "1" patch_user_body = { "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "attributes": new_attrs, "relationships": { "workplace": { @@ -2021,11 +2045,10 @@ async def test_update_resource_error_same_id( client: AsyncClient, user_1: User, ): - user_id = user_1.id another_id = 0 patch_user_body = { "data": { - "id": user_id, + "id": ViewBase.get_db_item_id(user_1), "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), }, } @@ -2066,7 +2089,7 @@ async def test_remove_to_one_relationship_using_by_update(self, async_session: A expected_name = fake.name() update_body = { "data": { - "id": str(child_obj.id), + "id": ViewBase.get_db_item_id(child_obj), "attributes": { "name": expected_name, }, @@ -2085,8 +2108,8 @@ async def test_remove_to_one_relationship_using_by_update(self, async_session: A assert res.status_code == status.HTTP_200_OK, res.text assert res.json() == { "data": { - "attributes": SelfRelationshipAttributesSchema(name=expected_name).dict(), - "id": str(child_obj.id), + "attributes": SelfRelationshipAttributesSchema(name=expected_name).model_dump(exclude_unset=True), + "id": 
ViewBase.get_db_item_id(child_obj), "relationships": {"parent_object": {"data": None}}, "type": "self_relationship", }, @@ -2116,20 +2139,18 @@ async def test_ok( patch_user_body = { "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "attributes": new_attrs, "relationships": { "computers": { "data": [ { "type": "computer", - # test id as int - "id": computer_1.id, + "id": ViewBase.get_db_item_id(computer_1), }, { "type": "computer", - # test id as str - "id": str(computer_2.id), + "id": ViewBase.get_db_item_id(computer_2), }, ], }, @@ -2145,17 +2166,17 @@ async def test_ok( assert res.json() == { "data": { "attributes": new_attrs, - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "relationships": { "computers": { "data": [ { "type": "computer", - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), }, { "type": "computer", - "id": str(computer_2.id), + "id": ViewBase.get_db_item_id(computer_2), }, ], }, @@ -2165,12 +2186,12 @@ async def test_ok( "included": [ { "attributes": {"name": computer_1.name}, - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), "type": "computer", }, { "attributes": {"name": computer_2.name}, - "id": str(computer_2.id), + "id": ViewBase.get_db_item_id(computer_2), "type": "computer", }, ], @@ -2182,7 +2203,7 @@ async def test_ok( "data": [ { "type": "computer", - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), }, ], } @@ -2194,13 +2215,13 @@ async def test_ok( assert res.json() == { "data": { "attributes": new_attrs, - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "relationships": { "computers": { "data": [ { "type": "computer", - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), }, ], }, @@ -2210,7 +2231,7 @@ async def test_ok( "included": [ { "attributes": {"name": computer_1.name}, - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), "type": "computer", }, ], @@ -2237,18 +2258,18 @@ 
async def test_relationship_not_found( patch_user_body = { "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "attributes": new_attrs, "relationships": { "computers": { "data": [ { "type": "computer", - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), }, { "type": "computer", - "id": fake_computer_id, + "id": str(fake_computer_id), }, ], }, @@ -2292,19 +2313,20 @@ async def test_remove_to_many_relationship_using_by_update(self, async_session: assert child_obj_1.self_relationship_id == parent_obj.id assert child_obj_2.self_relationship_id == parent_obj.id - assert len(parent_obj.children_objects) == 2 # noqa PLR2004 + assert len(parent_obj.children_objects) == 2 # noqa: PLR2004 async with AsyncClient(app=app, base_url="http://test") as client: expected_name = fake.name() update_body = { "data": { - "id": str(parent_obj.id), + "id": ViewBase.get_db_item_id(parent_obj), "attributes": { "name": expected_name, }, "relationships": { "children_objects": { - "data": None, + # clear by setting empty list + "data": [], }, }, }, @@ -2317,8 +2339,8 @@ async def test_remove_to_many_relationship_using_by_update(self, async_session: assert res.status_code == status.HTTP_200_OK, res.text assert res.json() == { "data": { - "attributes": SelfRelationshipAttributesSchema(name=expected_name).dict(), - "id": str(parent_obj.id), + "attributes": SelfRelationshipAttributesSchema(name=expected_name).model_dump(exclude_unset=True), + "id": ViewBase.get_db_item_id(parent_obj), "relationships": {"children_objects": {"data": []}}, "type": "self_relationship", }, @@ -2386,13 +2408,13 @@ async def test_delete_objects_many( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.model_validate(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, { - "attributes": UserAttributesBaseSchema.model_validate(user_3), - "id": 
str(user_3.id), + "attributes": UserAttributesBaseSchema.model_validate(user_3).model_dump(), + "id": ViewBase.get_db_item_id(user_3), "type": "user", }, ], @@ -2405,8 +2427,8 @@ async def test_delete_objects_many( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.model_validate(user_2), - "id": str(user_2.id), + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump(), + "id": ViewBase.get_db_item_id(user_2), "type": "user", }, ], @@ -2429,17 +2451,17 @@ async def test_select_custom_fields( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict( + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump( include=set(queried_user_fields.split(",")), ), - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_2).dict( + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump( include=set(queried_user_fields.split(",")), ), - "id": str(user_2.id), + "id": ViewBase.get_db_item_id(user_2), "type": "user", }, ], @@ -2501,7 +2523,7 @@ async def test_openapi_endpoint_ok(self, client: AsyncClient, app: FastAPI): async def test_openapi_for_client_can_set_id(self): class Schema(BaseModel): - id: UUID = Field(json_schema_extra={"client_can_set_id": True}) + id: Annotated[UUID, ClientCanSetId()] app = build_app_custom( model=User, @@ -2535,7 +2557,7 @@ async def test_filters_really_works( "meta": {"count": 0, "totalPages": 1}, } - @mark.parametrize("field_name", [param(name, id=name) for name in ["id", "name", "age", "email"]]) + @pytest.mark.parametrize("field_name", [pytest.param(name, id=name) for name in ["id", "name", "age", "email"]]) async def test_field_filters( self, app: FastAPI, @@ -2555,7 +2577,7 @@ async def test_field_filters( "data": [ { "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), - "id": str(user_1.id), + "id": 
ViewBase.get_db_item_id(user_1), "type": "user", }, ], @@ -2587,7 +2609,7 @@ async def test_several_field_filters_at_the_same_time( "data": [ { "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, ], @@ -2616,11 +2638,11 @@ async def test_field_filters_with_values_from_different_models( "meta": {"count": 0, "totalPages": 1}, } - @mark.parametrize( + @pytest.mark.parametrize( ("filter_dict", "expected_email_is_null"), [ - param([{"name": "email", "op": "is_", "val": None}], True), - param([{"name": "email", "op": "isnot", "val": None}], False), + pytest.param([{"name": "email", "op": "is_", "val": None}], True), + pytest.param([{"name": "email", "op": "isnot", "val": None}], False), ], ) async def test_filter_by_null( @@ -2646,7 +2668,8 @@ async def test_filter_by_null( response_json = response.json() - assert len(data := response_json["data"]) == 1 + data = response_json["data"] + assert len(data) == 1 assert data[0]["id"] == str(target_user.id) assert data[0]["attributes"]["email"] == target_user.email @@ -2707,20 +2730,8 @@ async def test_custom_sql_filter_lower_string( assert user_1.id != user_2.id - def lower_equals_sql_filter( - schema_field: FieldInfo, - model_column: InstrumentedAttribute, - value: str, - operator: str, - ): - return func.lower(model_column) == func.lower(value) - class UserWithEmailFieldSchema(UserAttributesBaseSchema): - email: str = Field( - json_schema_extra={ - "_lower_equals_sql_filter_": lower_equals_sql_filter, - }, - ) + email: Annotated[str, sql_filter_lower_equals] app = build_app_custom( model=User, @@ -2750,14 +2761,13 @@ class UserWithEmailFieldSchema(UserAttributesBaseSchema): assert len(response_data) == 1 assert response_data[0] == { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": resource_type, "attributes": UserWithEmailFieldSchema.model_validate(user_1).model_dump(), } async def 
test_custom_sql_filter_lower_string_old_style_with_joins( self, - caplog, async_session: AsyncSession, user_1: User, user_2: User, @@ -2766,20 +2776,8 @@ async def test_custom_sql_filter_lower_string_old_style_with_joins( assert user_1.id != user_2.id - def lower_equals_sql_filter( - schema_field: FieldInfo, - model_column: InstrumentedAttribute, - value: str, - operator: str, - ): - return func.lower(model_column) == func.lower(value), [] - class UserWithEmailFieldFilterSchema(UserAttributesBaseSchema): - email: str = Field( - json_schema_extra={ - "_lower_equals_sql_filter_": lower_equals_sql_filter, - }, - ) + email: Annotated[str, sql_filter_lower_equals] app = build_app_custom( model=User, @@ -2809,39 +2807,16 @@ class UserWithEmailFieldFilterSchema(UserAttributesBaseSchema): assert len(response_data) == 1 assert response_data[0] == { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": resource_type, "attributes": UserWithEmailFieldFilterSchema.model_validate(user_1).model_dump(), } - assert any( - # str from logs - "Please return only filter expression from now on" in record.msg - # check all records - for record in caplog.records - ) - async def test_custom_sql_filter_invalid_result( - self, - caplog, - async_session: AsyncSession, - user_1: User, - ): + async def test_custom_sql_filter_doesnt_exist(self): resource_type = "user_with_custom_invalid_sql_filter" - def returns_invalid_number_of_params_filter( - schema_field: FieldInfo, - model_column: InstrumentedAttribute, - value: str, - operator: str, - ): - return 1, 2, 3 - class UserWithInvalidEmailFieldFilterSchema(UserAttributesBaseSchema): - email: str = Field( - json_schema_extra={ - "_custom_broken_filter_sql_filter_": returns_invalid_number_of_params_filter, - }, - ) + email: str app = build_app_custom( model=User, @@ -2849,12 +2824,14 @@ class UserWithInvalidEmailFieldFilterSchema(UserAttributesBaseSchema): resource_type=resource_type, ) + field_name = "email" + field_op = 
"custom_broken_filter" params = { "filter": dumps( [ { - "name": "email", - "op": "custom_broken_filter", + "name": field_name, + "op": field_op, "val": "qwerty", }, ], @@ -2867,7 +2844,7 @@ class UserWithInvalidEmailFieldFilterSchema(UserAttributesBaseSchema): assert response.json() == { "errors": [ { - "detail": "Custom sql filter backend error.", + "detail": f"Field {field_name!r} has no operator {field_op!r}", "source": {"parameter": "filters"}, "status_code": status.HTTP_400_BAD_REQUEST, "title": "Invalid filters querystring parameter.", @@ -2883,6 +2860,7 @@ async def test_composite_filter_by_one_field( user_2: User, user_3: User, ): + assert user_2.id, "user 2 should exist" params = { "filter": dumps( [ @@ -2904,13 +2882,13 @@ async def test_composite_filter_by_one_field( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.model_validate(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, { - "attributes": UserAttributesBaseSchema.model_validate(user_3), - "id": str(user_3.id), + "attributes": UserAttributesBaseSchema.model_validate(user_3).model_dump(), + "id": ViewBase.get_db_item_id(user_3), "type": "user", }, ], @@ -2926,6 +2904,7 @@ async def test_composite_filter_by_several_fields( user_2: User, user_3: User, ): + assert user_2.id, "user 2 should exist" params = { "filter": dumps( [ @@ -2952,8 +2931,8 @@ async def test_composite_filter_by_several_fields( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.model_validate(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, ], @@ -2981,7 +2960,7 @@ async def test_composite_filter_with_mutually_exclusive_conditions( ], }, { - "name": "name", + "name": "id", "op": "eq", "val": user_2.id, }, @@ -3063,18 +3042,18 @@ async def 
test_filter_with_nested_conditions( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.model_validate(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, { - "attributes": UserAttributesBaseSchema.model_validate(user_2), - "id": str(user_2.id), + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump(), + "id": ViewBase.get_db_item_id(user_2), "type": "user", }, { - "attributes": UserAttributesBaseSchema.model_validate(user_4), - "id": str(user_4.id), + "attributes": UserAttributesBaseSchema.model_validate(user_4).model_dump(), + "id": ViewBase.get_db_item_id(user_4), "type": "user", }, ], @@ -3122,8 +3101,8 @@ async def test_join_by_relationships_does_not_duplicating_response_entities( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.model_validate(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, ], @@ -3372,7 +3351,9 @@ async def test_join_by_relationships_for_one_model_by_different_join_chains( assert response.status_code == status.HTTP_200_OK, response.text assert response.json() == { - "data": [{"attributes": {}, "id": str(alpha_1.id), "type": "alpha"}], + "data": [ + {"attributes": {}, "id": ViewBase.get_db_item_id(alpha_1), "type": "alpha"}, + ], "jsonapi": {"version": "1.0"}, "meta": {"count": 1, "totalPages": 1}, } @@ -3386,11 +3367,11 @@ class TestSorts: def get_reverse(self, order: str) -> bool: return order is DESCENDING - @mark.parametrize( + @pytest.mark.parametrize( "order", [ - param(ASCENDING, id="ascending"), - param(DESCENDING, id="descending"), + pytest.param(ASCENDING, id="ascending"), + pytest.param(DESCENDING, id="descending"), ], ) async def test_sort( @@ -3429,12 +3410,12 @@ async def test_sort( [ { "attributes": 
UserAttributesBaseSchema.model_validate(user_1).model_dump(), - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, { "attributes": UserAttributesBaseSchema.model_validate(user_3).model_dump(), - "id": str(user_3.id), + "id": ViewBase.get_db_item_id(user_3), "type": "user", }, ], @@ -3478,4 +3459,4 @@ async def test_incorrect_field_name( } -# todo: test errors +# TODO: test errors diff --git a/tests/test_api/test_custom_body_dependency.py b/tests/test_api/test_custom_body_dependency.py index 5c6c212b..bb835c69 100644 --- a/tests/test_api/test_custom_body_dependency.py +++ b/tests/test_api/test_custom_body_dependency.py @@ -1,9 +1,8 @@ -from typing import ClassVar, Dict, Literal +from typing import ClassVar, Literal import pytest from fastapi import Body, Depends, FastAPI, HTTPException, status from httpx import AsyncClient -from pytest_asyncio import fixture from sqlalchemy.ext.asyncio import AsyncSession from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric @@ -24,7 +23,28 @@ UserSchema, ) -pytestmark = pytest.mark.asyncio + +@pytest.fixture(scope="class") +def resource_type(): + return "user_w_custom_deps_for_generic" + + +@pytest.fixture(scope="class") +def app_w_deps(resource_type): + return build_app_custom( + model=User, + schema=UserSchema, + resource_type=resource_type, + class_list=UserCustomListView, + class_detail=UserCustomDetailView, + path=f"/path_{resource_type}", + ) + + +@pytest.fixture() +async def client(app_w_deps: FastAPI): + async with AsyncClient(app=app_w_deps, base_url="http://test") as client: + yield client def get_custom_name_from_body( @@ -72,7 +92,7 @@ class UserUpdateCustomDependency(ArbitraryModelBase): class UserCustomListView(ListViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { + method_dependencies: ClassVar[dict[HTTPMethod, HTTPMethodConfig]] = { HTTPMethod.ALL: HTTPMethodConfig( dependencies=SessionDependency, 
prepare_data_layer_kwargs=common_handler, @@ -84,7 +104,7 @@ class UserCustomListView(ListViewBaseGeneric): class UserCustomDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { + method_dependencies: ClassVar[dict[HTTPMethod, HTTPMethodConfig]] = { HTTPMethod.ALL: HTTPMethodConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=common_handler, @@ -95,33 +115,13 @@ class UserCustomDetailView(DetailViewBaseGeneric): } +@pytest.mark.usefixtures("refresh_db") class TestGenericUserCreateUpdateWithBodyDependency( BaseGenericUserCreateUpdateWithBodyDependency, ): validator_create = validator_create validator_update = validator_update - @pytest.fixture(scope="class") - def resource_type(self): - return "user_w_custom_deps_for_generic" - - @pytest.fixture(scope="class") - def app_w_deps(self, resource_type): - app = build_app_custom( - model=User, - schema=UserSchema, - resource_type=resource_type, - class_list=UserCustomListView, - class_detail=UserCustomDetailView, - path=f"/path_{resource_type}", - ) - return app - - @fixture(scope="class") - async def client(self, app_w_deps: FastAPI): - async with AsyncClient(app=app_w_deps, base_url="http://test") as client: - yield client - async def test_generic_create_validation_error_key_not_passed( self, app_w_deps: FastAPI, diff --git a/tests/test_api/test_routers.py b/tests/test_api/test_routers.py index dad6d152..2f93c0d6 100644 --- a/tests/test_api/test_routers.py +++ b/tests/test_api/test_routers.py @@ -1,12 +1,13 @@ -from typing import ClassVar, Dict, Optional +from __future__ import annotations +from typing import Annotated, ClassVar + +import pytest from fastapi import APIRouter, Depends, FastAPI, Header, Path, status from httpx import AsyncClient from pydantic import BaseModel, ConfigDict -from pytest import mark # noqa from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession -from typing_extensions import Annotated from fastapi_jsonapi 
import RoutersJSONAPI, init from fastapi_jsonapi.exceptions import Forbidden, InternalServerError @@ -27,8 +28,6 @@ UserSchema, ) -pytestmark = mark.asyncio - def build_app(detail_view, resource_type: str) -> FastAPI: app = FastAPI( @@ -69,7 +68,7 @@ class CustomDependencies(BaseModel): dependency_1: int = Depends(one) dependency_2: int = Depends(two) - async def dependencies_handler(view_base: ViewBase, dto: CustomDependencies) -> Optional[Dict]: + async def dependencies_handler(view_base: ViewBase, dto: CustomDependencies) -> dict | None: raise InternalServerError( detail="hi", errors=[ @@ -87,7 +86,7 @@ async def dependencies_handler(view_base: ViewBase, dto: CustomDependencies) -> ) class DependencyInjectionDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { + method_dependencies: ClassVar[dict[HTTPMethod, HTTPMethodConfig]] = { HTTPMethod.GET: HTTPMethodConfig( dependencies=CustomDependencies, prepare_data_layer_kwargs=dependencies_handler, @@ -129,7 +128,7 @@ async def check_that_user_is_admin(x_auth: Annotated[str, Header()]): raise Forbidden(detail="Only admin user have permissions to this endpoint") class AdminOnlyPermission(BaseModel): - is_admin: Optional[bool] = Depends(check_that_user_is_admin) + is_admin: bool | None = Depends(check_that_user_is_admin) def get_path_obj_id(obj_id: int = Path(default=...)): return obj_id @@ -137,13 +136,13 @@ def get_path_obj_id(obj_id: int = Path(default=...)): class DetailGenericDependency(SessionDependency): custom_name_obj_id: int = Depends(get_path_obj_id) - def all_handler(view: ViewBase, dto: DetailGenericDependency) -> Dict: + def all_handler(view: ViewBase, dto: DetailGenericDependency) -> dict: # test inside handler assert dto.custom_name_obj_id == int(view.request.path_params["obj_id"]) return {"session": dto.session} class DependencyInjectionDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { + 
method_dependencies: ClassVar[dict[HTTPMethod, HTTPMethodConfig]] = { HTTPMethod.GET: HTTPMethodConfig(dependencies=AdminOnlyPermission), HTTPMethod.ALL: HTTPMethodConfig( dependencies=DetailGenericDependency, @@ -187,7 +186,7 @@ class GetDetailDependencies(BaseModel): session: AsyncSession = Depends(async_session_dependency) model_config = ConfigDict(arbitrary_types_allowed=True) - async def set_session_and_ignore_user_1(view_base: ViewBase, dto: GetDetailDependencies) -> Dict: + async def set_session_and_ignore_user_1(view_base: ViewBase, dto: GetDetailDependencies) -> dict: # noqa: ARG001 query = select(User).where(User.id != user_1.id) return { @@ -196,7 +195,7 @@ async def set_session_and_ignore_user_1(view_base: ViewBase, dto: GetDetailDepen } class DependencyInjectionDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { + method_dependencies: ClassVar[dict[HTTPMethod, HTTPMethodConfig]] = { HTTPMethod.GET: HTTPMethodConfig( dependencies=GetDetailDependencies, prepare_data_layer_kwargs=set_session_and_ignore_user_1, diff --git a/tests/test_api/test_validators.py b/tests/test_api/test_validators.py index 91898c49..630e756f 100644 --- a/tests/test_api/test_validators.py +++ b/tests/test_api/test_validators.py @@ -1,30 +1,64 @@ +from __future__ import annotations + from copy import deepcopy -from typing import Dict, List, Optional, Set, Type +from typing import ( + TYPE_CHECKING, + Annotated, + Callable, + NoReturn, +) import pytest from fastapi import FastAPI, status from httpx import AsyncClient -from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator -from pytest import mark, param # noqa: PT013 -from pytest_asyncio import fixture -from sqlalchemy.ext.asyncio import AsyncSession +from pydantic import ( + AfterValidator, + BeforeValidator, + ConfigDict, + field_validator, + model_validator, +) +from pydantic import ( + BaseModel as PydanticBaseModel, +) +from 
pydantic_core.core_schema import ValidationInfo from fastapi_jsonapi import RoutersJSONAPI -from fastapi_jsonapi.exceptions import BadRequest +from fastapi_jsonapi.schema import BaseModel from fastapi_jsonapi.schema_builder import SchemaBuilder -from fastapi_jsonapi.validation_utils import extract_field_validators +from fastapi_jsonapi.types_metadata import ClientCanSetId +from fastapi_jsonapi.validation_utils import extract_validators +from fastapi_jsonapi.views.view_base import ViewBase from tests.fixtures.app import build_app_custom from tests.misc.utils import fake from tests.models import ( Task, User, ) -from tests.schemas import TaskBaseSchema +from tests.schemas import TaskBaseSchema, UserAttributesBaseSchema + +if TYPE_CHECKING: + from sqlalchemy.ext.asyncio import AsyncSession + + +@pytest.fixture() +def refresh_caches() -> None: + object_schemas_cache = deepcopy(SchemaBuilder.object_schemas_cache) + relationship_schema_cache = deepcopy(SchemaBuilder.relationship_schema_cache) + base_jsonapi_object_schemas_cache = deepcopy(SchemaBuilder.base_jsonapi_object_schemas_cache) + + all_jsonapi_routers = deepcopy(RoutersJSONAPI.all_jsonapi_routers) + + yield -pytestmark = pytest.mark.asyncio + SchemaBuilder.object_schemas_cache = object_schemas_cache + SchemaBuilder.relationship_schema_cache = relationship_schema_cache + SchemaBuilder.base_jsonapi_object_schemas_cache = base_jsonapi_object_schemas_cache + RoutersJSONAPI.all_jsonapi_routers = all_jsonapi_routers -@fixture() + +@pytest.fixture() async def task_with_none_ids( async_session: AsyncSession, ) -> Task: @@ -40,6 +74,179 @@ def resource_type(): return "task" +@pytest.fixture() +def format_error() -> Callable[[str], str]: + unique_marker = fake.word() + + def _format_error(v) -> str: + return f"[{unique_marker}] Check validator for {v}" + + return _format_error + + +@pytest.fixture() +def reformat_error(format_error) -> Callable[[str, str], str]: + def _reformat_error(marker, v) -> str: + return 
f"[{marker}] {format_error(v)}" + + return _reformat_error + + +@pytest.mark.usefixtures("refresh_db", "refresh_caches") +class TestAnnotatedBeforeAndAfterValidators: + @pytest.mark.parametrize("validator", [BeforeValidator, AfterValidator]) + async def test_validator_annotated( + self, + async_session: AsyncSession, + validator: type[BeforeValidator | AfterValidator], + ) -> None: + def mod_name(v: str) -> str: + return v.title() + + def mod_age(v: int) -> int: + return v * 2 + + class UserAnnotatedFieldsSchema(UserAttributesBaseSchema): + name: Annotated[str, validator(mod_name)] + age: Annotated[int, validator(mod_age)] + + r_type = fake.word() + fake.word() + app = build_app_custom( + model=User, + schema=UserAnnotatedFieldsSchema, + resource_type=r_type, + ) + + raw_name = fake.name().lower() + # raw_age = fake.pyint(min_value=13, max_value=99) + raw_age = 80 + + user_attrs = { + "name": raw_name, + "age": raw_age, + } + create_user_body = { + "data": { + "attributes": user_attrs, + }, + } + + async with AsyncClient(app=app, base_url="http://test") as client: + url = app.url_path_for(f"create_{r_type}_list") + res = await client.post(url, json=create_user_body) + assert res.status_code == status.HTTP_201_CREATED, res.text + response_json = res.json() + + assert "data" in response_json + data = response_json["data"] + obj_id = data["id"] + obj = await async_session.get(User, int(obj_id)) + + assert data["type"] == r_type + attributes = data["attributes"] + user_name = attributes["name"] + assert user_name != raw_name, attributes + assert user_name == mod_name(raw_name), attributes + user_age = attributes["age"] + assert user_age != raw_age, attributes + expected_age_in_db_after_deserialize = mod_age(raw_age) + meta = { + "raw age": raw_age, + "user age": obj.age, + "user id": obj.id, + "user name": obj.name, + } + assert obj.age == expected_age_in_db_after_deserialize, meta + expected_age_after_preparing_result = mod_age(expected_age_in_db_after_deserialize) + 
assert user_age == expected_age_after_preparing_result, (attributes, meta) + + @pytest.mark.parametrize("validator", [BeforeValidator, AfterValidator]) + async def test_id_validator_annotated( + self, + validator: type[BeforeValidator | AfterValidator], + format_error, + ): + def validate_id_raise(v: str) -> NoReturn: + raise ValueError(format_error(v)) + + class UserAnnotatedIdValidatorSchema(UserAttributesBaseSchema): + id: Annotated[int, ClientCanSetId(), validator(validate_id_raise)] + + r_type = fake.word() + fake.word() + app = build_app_custom( + model=User, + schema=UserAnnotatedIdValidatorSchema, + resource_type=r_type, + ) + + user_attrs = { + "name": fake.name(), + } + new_user_id = fake.pyint(min_value=1000, max_value=10_000) + create_user_body = { + "data": { + "attributes": user_attrs, + "id": str(new_user_id), + }, + } + + async with AsyncClient(app=app, base_url="http://test") as client: + url = app.url_path_for(f"create_{r_type}_list") + res = await client.post(url, json=create_user_body) + assert res.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, res.text + response_json = res.json() + + assert "detail" in response_json, response_json + detail = response_json["detail"][0] + assert detail["loc"] == ["body", "data", "id"] + assert detail["msg"].endswith(format_error(new_user_id)), detail["msg"] + + @pytest.mark.parametrize("validator", [BeforeValidator, AfterValidator]) + async def test_validator_annotated_sequence_arg( + self, + validator: type[BeforeValidator | AfterValidator], + format_error, + ): + flag_name = fake.name() + + def validate_name(v): + for item in v: + if item == flag_name: + raise ValueError(format_error(item)) + + class UserAnnotatedSequenceNamesSchema(UserAttributesBaseSchema): + names: Annotated[list[str], validator(validate_name)] + + r_type = fake.word() + fake.word() + app = build_app_custom( + model=User, + schema=UserAnnotatedSequenceNamesSchema, + resource_type=r_type, + ) + + user_attrs = { + "names": [fake.name(), 
flag_name], + "name": fake.name(), + } + create_user_body = { + "data": { + "attributes": user_attrs, + }, + } + + async with AsyncClient(app=app, base_url="http://test") as client: + url = app.url_path_for(f"create_{r_type}_list") + res = await client.post(url, json=create_user_body) + assert res.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, res.text + response_json = res.json() + + assert "detail" in response_json, response_json + detail = response_json["detail"][0] + assert detail["loc"] == ["body", "data", "attributes", "names"] + assert detail["msg"].endswith(format_error(flag_name)), detail["msg"] + + +@pytest.mark.usefixtures("refresh_db") class TestTaskValidators: async def test_base_model_validator_pre_true_get_one( self, @@ -49,6 +256,7 @@ async def test_base_model_validator_pre_true_get_one( task_with_none_ids: Task, ): assert task_with_none_ids.task_ids is None + assert task_with_none_ids.another_task_ids is None url = app.url_path_for(f"get_{resource_type}_detail", obj_id=task_with_none_ids.id) res = await client.get(url) assert res.status_code == status.HTTP_200_OK, res.text @@ -56,20 +264,21 @@ async def test_base_model_validator_pre_true_get_one( attributes = response_data["data"].pop("attributes") assert response_data == { "data": { - "id": str(task_with_none_ids.id), + "id": ViewBase.get_db_item_id(task_with_none_ids), "type": resource_type, + # dont' pass fields at all }, "jsonapi": {"version": "1.0"}, "meta": None, } assert attributes == { # not `None`! 
schema validator returns empty list `[]` - # "task_ids": None, "task_ids": [], + "another_task_ids": [], } - assert attributes == TaskBaseSchema.model_validate(task_with_none_ids) + assert attributes == TaskBaseSchema.model_validate(task_with_none_ids).model_dump() - async def test_base_model_root_validator_get_list( + async def test_base_model_model_validator_get_list( self, app: FastAPI, client: AsyncClient, @@ -77,32 +286,25 @@ async def test_base_model_root_validator_get_list( task_with_none_ids: Task, ): assert task_with_none_ids.task_ids is None + assert task_with_none_ids.another_task_ids is None url = app.url_path_for(f"get_{resource_type}_list") res = await client.get(url) assert res.status_code == status.HTTP_200_OK, res.text response_data = res.json() - assert response_data == { - "data": [ - { - "id": str(task_with_none_ids.id), - "type": resource_type, - "attributes": { - # not `None`! schema validator returns empty list `[]` - # "task_ids": None, - "task_ids": [], - }, + expected_data = [ + { + "id": ViewBase.get_db_item_id(task_with_none_ids), + "type": resource_type, + "attributes": { + # not `None`! 
schema validator returns empty list `[]` + "task_ids": [], + "another_task_ids": [], }, - ], - "jsonapi": { - "version": "1.0", - }, - "meta": { - "count": 1, - "totalPages": 1, }, - } + ] + assert response_data["data"] == expected_data - async def test_base_model_root_validator_create( + async def test_base_model_model_validator_create( self, app: FastAPI, client: AsyncClient, @@ -112,6 +314,7 @@ async def test_base_model_root_validator_create( task_data = { # should be converted to [] by schema on create "task_ids": None, + "another_task_ids": None, } data_create = { "data": { @@ -126,16 +329,17 @@ async def test_base_model_root_validator_create( task_id = response_data["data"].pop("id") task = await async_session.get(Task, int(task_id)) assert isinstance(task, Task) - assert task.task_ids == [] # we sent request with `None`, but value in db is `[]` # because validator converted data before object creation assert task.task_ids == [] + assert task.another_task_ids == [] assert response_data == { "data": { "type": resource_type, "attributes": { # should be empty list "task_ids": [], + "another_task_ids": [], }, }, "jsonapi": {"version": "1.0"}, @@ -143,35 +347,18 @@ async def test_base_model_root_validator_create( } +@pytest.mark.usefixtures("refresh_db", "refresh_caches") class TestValidators: resource_type = "validator" - @fixture(autouse=True) - def _refresh_caches(self) -> None: - object_schemas_cache = deepcopy(SchemaBuilder.object_schemas_cache) - relationship_schema_cache = deepcopy(SchemaBuilder.relationship_schema_cache) - base_jsonapi_object_schemas_cache = deepcopy(SchemaBuilder.base_jsonapi_object_schemas_cache) - - all_jsonapi_routers = deepcopy(RoutersJSONAPI.all_jsonapi_routers) - - yield - - SchemaBuilder.object_schemas_cache = object_schemas_cache - SchemaBuilder.relationship_schema_cache = relationship_schema_cache - SchemaBuilder.base_jsonapi_object_schemas_cache = base_jsonapi_object_schemas_cache - - RoutersJSONAPI.all_jsonapi_routers = 
all_jsonapi_routers - - def build_app(self, schema, resource_type: Optional[str] = None) -> FastAPI: + def build_app(self, schema, resource_type: str | None = None) -> FastAPI: return build_app_custom( model=User, schema=schema, - # schema_in_post=schema, - # schema_in_patch=schema, resource_type=resource_type or self.resource_type, ) - def inherit(self, schema: Type[BaseModel]) -> Type[BaseModel]: + def inherit(self, schema: type[PydanticBaseModel]) -> type[PydanticBaseModel]: class InheritedSchema(schema): pass @@ -180,31 +367,29 @@ class InheritedSchema(schema): async def execute_request_and_check_response( self, app: FastAPI, - body: Dict, + body: dict, expected_detail: str, - resource_type: Optional[str] = None, + resource_type: str | None = None, + expected_status: int = status.HTTP_422_UNPROCESSABLE_ENTITY, ): resource_type = resource_type or self.resource_type async with AsyncClient(app=app, base_url="http://test") as client: - url = app.url_path_for(f"get_{resource_type}_list") + url = app.url_path_for(f"create_{resource_type}_list") res = await client.post(url, json=body) - assert res.status_code == status.HTTP_400_BAD_REQUEST, res.text - assert res.json() == { - "errors": [ - { - "detail": expected_detail, - "source": {"pointer": ""}, - "status_code": status.HTTP_400_BAD_REQUEST, - "title": "Bad Request", - }, - ], - } + assert res.status_code == expected_status, res.text + response_json = res.json() + + assert response_json + assert "detail" in response_json, response_json + error = response_json["detail"][0] + assert error["msg"].endswith(expected_detail), (error, expected_detail) async def execute_request_twice_and_check_response( self, - schema: Type[BaseModel], - body: Dict, + schema: type[PydanticBaseModel], + body: dict, expected_detail: str, + expected_status: int = status.HTTP_422_UNPROCESSABLE_ENTITY, ): """ Makes two requests for check schema inheritance @@ -220,157 +405,170 @@ async def execute_request_twice_and_check_response( body=body, 
expected_detail=expected_detail, resource_type=resource_type, + expected_status=expected_status, ) - async def test_field_validator_call(self): + async def test_field_validator_call(self, format_error): """ Basic check to ensure that field validator called """ - class UserSchemaWithValidator(BaseModel): + class UserSchemaWithValidator(PydanticBaseModel): name: str @field_validator("name") - @classmethod - def validate_name(cls, v): - # checks that cls arg is not bound to the origin class - assert cls is not UserSchemaWithValidator - - raise BadRequest(detail="Check validator") + @staticmethod + def validate_name(value): + raise ValueError(format_error(value)) model_config = ConfigDict(from_attributes=True) - attrs = {"name": fake.name()} + new_name = fake.name() + attrs = {"name": new_name} create_user_body = {"data": {"attributes": attrs}} await self.execute_request_twice_and_check_response( schema=UserSchemaWithValidator, body=create_user_body, - expected_detail="Check validator", + expected_detail=format_error(new_name), ) - async def test_field_validator_each_item_arg(self): - class UserSchemaWithValidator(BaseModel): - names: List[str] + async def test_field_validator_each_item_arg(self, format_error): + flag_name = fake.word() + + class UserSchemaWithValidator(PydanticBaseModel): + names: list[str] @field_validator("names") - @classmethod - def validate_name(cls, v): + @staticmethod + def validate_name(v): for item in v: - if item == "bad_name": - raise BadRequest(detail="Bad name not allowed") + if item == flag_name: + raise ValueError(format_error(item)) model_config = ConfigDict(from_attributes=True) - attrs = {"names": ["good_name", "bad_name"]} + attrs = {"names": [fake.name(), flag_name]} create_user_body = {"data": {"attributes": attrs}} await self.execute_request_twice_and_check_response( schema=UserSchemaWithValidator, body=create_user_body, - expected_detail="Bad name not allowed", + expected_detail=format_error(flag_name), ) - async def 
test_field_validator_pre_arg(self): - class UserSchemaWithValidator(BaseModel): - name: List[str] + async def test_field_validator_pre_arg(self, format_error): + class UserSchemaWithValidator(PydanticBaseModel): + name: list[str] @field_validator("name", mode="before") - @classmethod - def validate_name_pre(cls, v): - raise BadRequest(detail="Pre validator called") + @staticmethod + def validate_name_pre(value): + raise ValueError(format_error(value)) - @field_validator("name") - @classmethod - def validate_name(cls, v): - raise BadRequest(detail="Not pre validator called") + @field_validator("name", mode="after") + @staticmethod + def validate_name(value): + raise ValueError("not this!") model_config = ConfigDict(from_attributes=True) - attrs = {"name": fake.name()} + new_name = fake.name() + attrs = {"name": new_name} create_user_body = {"data": {"attributes": attrs}} await self.execute_request_twice_and_check_response( schema=UserSchemaWithValidator, body=create_user_body, - expected_detail="Pre validator called", + expected_detail=format_error(new_name), ) - async def test_field_validator_always_arg(self): - class UserSchemaWithValidator(BaseModel): + async def test_field_validator_always_arg(self, format_error): + class UserSchemaWithValidator(PydanticBaseModel): name: str = None @field_validator("name") - @classmethod - def validate_name(cls, v): - raise BadRequest(detail="Called always validator") + @staticmethod + def validate_name(v): + raise ValueError(format_error(v)) model_config = ConfigDict(from_attributes=True) - create_user_body = {"data": {"attributes": {}}} + new_name = fake.name() + create_user_body = {"data": {"attributes": {"name": new_name}}} await self.execute_request_twice_and_check_response( schema=UserSchemaWithValidator, body=create_user_body, - expected_detail="Called always validator", + expected_detail=format_error(new_name), ) - async def test_field_validator_several_validators(self): - class UserSchemaWithValidator(BaseModel): + 
async def test_field_validator_several_validators(self, reformat_error): + validator_1_marker = fake.word() + validator_2_marker = fake.word() + + validator_1_flag = fake.sentence() + validator_2_flag = fake.sentence() + + class UserSchemaWithValidator(PydanticBaseModel): field: str @field_validator("field") - @classmethod - def validator_1(cls, v): - if v == "check_validator_1": - raise BadRequest(detail="Called validator 1") + @staticmethod + def validator_1(value): + if value == validator_1_flag: + raise ValueError(reformat_error(validator_1_marker, value)) - return v + return value @field_validator("field") - @classmethod - def validator_2(cls, v): - if v == "check_validator_2": - raise BadRequest(detail="Called validator 2") + @staticmethod + def validator_2(value): + if value == validator_2_flag: + raise ValueError(reformat_error(validator_2_marker, value)) - return v + return value model_config = ConfigDict(from_attributes=True) - attrs = {"field": "check_validator_1"} + attrs = {"field": validator_1_flag} create_user_body = {"data": {"attributes": attrs}} app = self.build_app(UserSchemaWithValidator) await self.execute_request_and_check_response( app=app, body=create_user_body, - expected_detail="Called validator 1", + expected_detail=reformat_error(validator_1_marker, validator_1_flag), ) - attrs = {"field": "check_validator_2"} + attrs = {"field": validator_2_flag} create_user_body = {"data": {"attributes": attrs}} await self.execute_request_and_check_response( app=app, body=create_user_body, - expected_detail="Called validator 2", + expected_detail=reformat_error(validator_2_marker, validator_2_flag), ) - async def test_field_validator_asterisk(self): - class UserSchemaWithValidator(BaseModel): + async def test_field_validator_asterisk(self, reformat_error): + bad_value = fake.word() + + class UserSchemaWithValidator(PydanticBaseModel): field_1: str field_2: str @field_validator("*", mode="before") - @classmethod - def validator(cls, v): - if v == 
"bad_value": - raise BadRequest(detail="Check validator") + @staticmethod + def validator(v, validation_info: ValidationInfo): + if v == bad_value: + raise ValueError(reformat_error(validation_info.field_name, v)) + return v model_config = ConfigDict(from_attributes=True) + error_field = "field_1" attrs = { - "field_1": "bad_value", + error_field: bad_value, "field_2": "", } create_user_body = {"data": {"attributes": attrs}} @@ -379,68 +577,75 @@ def validator(cls, v): await self.execute_request_and_check_response( app=app, body=create_user_body, - expected_detail="Check validator", + expected_detail=reformat_error(error_field, bad_value), ) + error_field = "field_2" attrs = { "field_1": "", - "field_2": "bad_value", + error_field: bad_value, } create_user_body = {"data": {"attributes": attrs}} await self.execute_request_and_check_response( app=app, body=create_user_body, - expected_detail="Check validator", + expected_detail=reformat_error(error_field, bad_value), ) - async def test_check_validator_for_id_field(self): + @pytest.mark.usefixtures("refresh_db") + async def test_check_validator_for_id_field(self, format_error): """ Unusual case because of "id" field handling in a different way than attributes """ - class UserSchemaWithValidator(BaseModel): - id: int = Field(json_schema_extra={"client_can_set_id": True}) + class UserSchemaWithValidator(PydanticBaseModel): + id: Annotated[int, ClientCanSetId()] - @field_validator("id") - @classmethod - def validate_id(cls, v): - raise BadRequest(detail="Check validator") + @field_validator("id", mode="after") + @staticmethod + def validate_id(value): + raise ValueError(format_error(value)) model_config = ConfigDict(from_attributes=True) + id_val = fake.pyint(min_value=10, max_value=100) create_user_body = { "data": { - "attributes": {}, - "id": 42, + "attributes": {"name": fake.name()}, + "id": str(id_val), }, } await self.execute_request_twice_and_check_response( schema=UserSchemaWithValidator, body=create_user_body, - 
expected_detail="Check validator", + expected_detail=format_error(id_val), ) - @mark.parametrize( + @pytest.mark.parametrize( "inherit", [ - param(True, id="inherited_true"), - param(False, id="inherited_false"), + pytest.param(True, id="inherited_true"), + pytest.param(False, id="inherited_false"), ], ) async def test_field_validator_can_change_value(self, inherit: bool): - class UserSchemaWithValidator(BaseModel): + def modificator(v: str) -> str: + return v.title() + + class UserSchemaWithValidator(PydanticBaseModel): name: str @field_validator("name") - @classmethod - def fix_title(cls, v): - return v.title() + @staticmethod + def fix_title(v): + return modificator(v) model_config = ConfigDict(from_attributes=True) - attrs = {"name": "john doe"} + name_lower = fake.name().lower() + attrs = {"name": name_lower} create_user_body = {"data": {"attributes": attrs}} if inherit: @@ -453,61 +658,80 @@ def fix_title(cls, v): assert res.status_code == status.HTTP_201_CREATED, res.text res_json = res.json() - assert res_json["data"] - assert res_json["data"].pop("id") - assert res_json == { - "data": { - "attributes": {"name": "John Doe"}, - "type": "validator", - }, - "jsonapi": {"version": "1.0"}, - "meta": None, - } - @mark.parametrize( - ("name", "expected_detail"), + expected_name = modificator(name_lower) + assert expected_name != name_lower + assert res_json["data"] + assert res_json["data"]["id"] + data = res_json["data"] + data.pop("id") + assert data == { + "attributes": {"name": expected_name}, + "type": "validator", + } + + @pytest.mark.parametrize( + ("name_idx"), [ - param("check_pre_1", "Raised 1 pre validator", id="check_1_pre_validator"), - param("check_pre_2", "Raised 2 pre validator", id="check_2_pre_validator"), - param("check_post_1", "Raised 1 post validator", id="check_1_post_validator"), - param("check_post_2", "Raised 2 post validator", id="check_2_post_validator"), + pytest.param(0, id="check_1_pre_validator"), + pytest.param(1, 
id="check_2_pre_validator"), + pytest.param(2, id="check_1_post_validator"), + pytest.param(3, id="check_2_post_validator"), ], ) - async def test_root_validator(self, name: str, expected_detail: str): - class UserSchemaWithValidator(BaseModel): + async def test_model_validators(self, reformat_error, name_idx: int): + flag_pre_1 = fake.word() + "_pre_1" + flag_pre_2 = fake.word() + "_pre_2" + flag_post_1 = fake.word() + "_post_1" + flag_post_2 = fake.word() + "_post_2" + + flags = [flag_pre_1, flag_pre_2, flag_post_1, flag_post_2] + name = flags[name_idx] + + marker_pre_1 = fake.word() + "_pre_1" + marker_pre_2 = fake.word() + "_pre_2" + marker_post_1 = fake.word() + "_post_1" + marker_post_2 = fake.word() + "_post_2" + + markers = [marker_pre_1, marker_pre_2, marker_post_1, marker_post_2] + marker = markers[name_idx] + + class UserSchemaWithModelValidator(PydanticBaseModel): name: str @model_validator(mode="before") - @classmethod - def validator_pre_1(cls, values): - if values["name"] == "check_pre_1": - raise BadRequest(detail="Raised 1 pre validator") + @staticmethod + def validator_pre_1(values): + if values["name"] == flag_pre_1: + raise ValueError(reformat_error(marker_pre_1, values["name"])) return values @model_validator(mode="before") - @classmethod - def validator_pre_2(cls, values): - if values["name"] == "check_pre_2": - raise BadRequest(detail="Raised 2 pre validator") + @staticmethod + def validator_pre_2(values): + if values["name"] == flag_pre_2: + raise ValueError(reformat_error(marker_pre_2, values["name"])) return values @model_validator(mode="after") - @classmethod - def validator_post_1(cls, values): - if values["name"] == "check_post_1": - raise BadRequest(detail="Raised 1 post validator") + @staticmethod + def validator_post_1(model): + value_name = model.name + if value_name == flag_post_1: + raise ValueError(reformat_error(marker_post_1, value_name)) - return values + return model @model_validator(mode="after") - @classmethod - def 
validator_post_2(cls, values): - if values["name"] == "check_post_2": - raise BadRequest(detail="Raised 2 post validator") + @staticmethod + def validator_post_2(model): + value_name = model.name + if value_name == flag_post_2: + raise ValueError(reformat_error(marker_post_2, value_name)) - return values + return model model_config = ConfigDict(from_attributes=True) @@ -515,31 +739,35 @@ def validator_post_2(cls, values): create_user_body = {"data": {"attributes": attrs}} await self.execute_request_twice_and_check_response( - schema=UserSchemaWithValidator, + schema=UserSchemaWithModelValidator, body=create_user_body, - expected_detail=expected_detail, + expected_detail=reformat_error(marker, name), ) - @mark.parametrize( + @pytest.mark.parametrize( "inherit", [ - param(True, id="inherited_true"), - param(False, id="inherited_false"), + pytest.param(True, id="inherited_true"), + pytest.param(False, id="inherited_false"), ], ) - async def test_root_validator_can_change_value(self, inherit: bool): - class UserSchemaWithValidator(BaseModel): + async def test_model_validator_can_change_value(self, inherit: bool, format_error): + def modificator(v: str) -> str: + return v.title() + + class UserSchemaWithValidator(PydanticBaseModel): name: str @model_validator(mode="after") - @classmethod - def fix_title(cls, v): - v["name"] = v["name"].title() - return v + @staticmethod + def fix_title(model): + model.name = modificator(model.name) + return model model_config = ConfigDict(from_attributes=True) - attrs = {"name": "john doe"} + new_name_lower = fake.name().lower() + attrs = {"name": new_name_lower} create_user_body = {"data": {"attributes": attrs}} if inherit: @@ -550,61 +778,82 @@ def fix_title(cls, v): url = app.url_path_for(f"get_{self.resource_type}_list") res = await client.post(url, json=create_user_body) assert res.status_code == status.HTTP_201_CREATED, res.text - res_json = res.json() - assert res_json["data"] - assert res_json["data"].pop("id") - assert res_json 
== { - "data": { - "attributes": {"name": "John Doe"}, - "type": "validator", - }, - "jsonapi": {"version": "1.0"}, - "meta": None, - } - @mark.parametrize( - ("name", "expected_detail"), + expected_name = modificator(new_name_lower) + assert expected_name != new_name_lower + + assert res_json["data"] + assert res_json["data"].pop("id") + assert res_json == { + "data": { + "attributes": {"name": expected_name}, + "type": "validator", + }, + "jsonapi": {"version": "1.0"}, + "meta": None, + } + + @pytest.mark.parametrize( + ("name_idx",), [ - param("check_pre_1", "check_pre_1", id="check_1_pre_validator"), - param("check_pre_2", "check_pre_2", id="check_2_pre_validator"), - param("check_post_1", "check_post_1", id="check_1_post_validator"), - param("check_post_2", "check_post_2", id="check_2_post_validator"), + pytest.param(0, id="check_1_pre_validator"), + pytest.param(1, id="check_2_pre_validator"), + pytest.param(2, id="check_1_post_validator"), + pytest.param(3, id="check_2_post_validator"), ], ) - async def test_root_validator_inheritance(self, name: str, expected_detail: str): - class UserSchemaWithValidatorBase(BaseModel): + async def test_model_validator_inheritance(self, name_idx: int, reformat_error): + flag_pre_1 = fake.word() + "_pre_1" + flag_pre_2 = fake.word() + "_pre_2" + flag_post_1 = fake.word() + "_post_1" + flag_post_2 = fake.word() + "_post_2" + + flags = [flag_pre_1, flag_pre_2, flag_post_1, flag_post_2] + name = flags[name_idx] + + marker_pre_1 = fake.word() + "_pre_1" + marker_pre_2 = fake.word() + "_pre_2" + marker_post_1 = fake.word() + "_post_1" + marker_post_2 = fake.word() + "_post_2" + + markers = [marker_pre_1, marker_pre_2, marker_post_1, marker_post_2] + marker = markers[name_idx] + + class UserSchemaWithValidatorBase(PydanticBaseModel): name: str @model_validator(mode="before") - @classmethod - def validator_pre_1(cls, values): - if values["name"] == "check_pre_1": - raise BadRequest(detail="Base check_pre_1") + @staticmethod + def 
validator_pre_1(values): + if values["name"] == flag_pre_1: + raise ValueError(reformat_error(fake.word(), values["name"])) return values @model_validator(mode="before") - @classmethod - def validator_pre_2(cls, values): - if values["name"] == "check_pre_2": - raise BadRequest(detail="Base check_pre_2") + @staticmethod + def validator_pre_2(values): + if values["name"] == flag_pre_2: + raise ValueError(reformat_error(fake.word(), values["name"])) return values - @classmethod - def validator_post_1(cls, values): - if values["name"] == "check_post_1": - raise BadRequest(detail="Base check_post_1") + @model_validator(mode="after") + @staticmethod + def validator_post_1(model): + if model.name == flag_post_1: + raise ValueError(reformat_error(fake.word(), model.name)) - return values + return model - @classmethod - def validator_post_2(cls, values): - if values["name"] == "check_post_2": - raise BadRequest(detail="Base check_post_2") + @model_validator(mode="after") + @staticmethod + def validator_post_2(model): + if model.name == flag_post_2: + raise ValueError(reformat_error(fake.word(), model.name)) - return values + return model model_config = ConfigDict(from_attributes=True) @@ -612,34 +861,36 @@ class UserSchemaWithValidator(UserSchemaWithValidatorBase): name: str @model_validator(mode="before") - @classmethod - def validator_pre_1(cls, values): - if values["name"] == "check_pre_1": - raise BadRequest(detail="check_pre_1") + @staticmethod + def validator_pre_1(values): + if values["name"] == flag_pre_1: + raise ValueError(reformat_error(marker_pre_1, values["name"])) return values @model_validator(mode="before") - @classmethod - def validator_pre_2(cls, values): - if values["name"] == "check_pre_2": - raise BadRequest(detail="check_pre_2") + @staticmethod + def validator_pre_2(values): + if values["name"] == flag_pre_2: + raise ValueError(reformat_error(marker_pre_2, values["name"])) return values - @classmethod - def validator_post_1(cls, values): - if 
values["name"] == "check_post_1": - raise BadRequest(detail="check_post_1") + @model_validator(mode="after") + @staticmethod + def validator_post_1(model): + if model.name == flag_post_1: + raise ValueError(reformat_error(marker_post_1, model.name)) - return values + return model - @classmethod - def validator_post_2(cls, values): - if values["name"] == "check_post_2": - raise BadRequest(detail="check_post_2") + @model_validator(mode="after") + @staticmethod + def validator_post_2(model): + if model.name == flag_post_2: + raise ValueError(reformat_error(marker_post_2, model.name)) - return values + return model model_config = ConfigDict(from_attributes=True) @@ -649,25 +900,25 @@ def validator_post_2(cls, values): await self.execute_request_and_check_response( app=self.build_app(UserSchemaWithValidator), body=create_user_body, - expected_detail=expected_detail, + expected_detail=reformat_error(marker, name), ) class TestValidationUtils: - @mark.parametrize( + @pytest.mark.parametrize( ("include", "exclude", "expected"), [ - param({"item_1"}, None, {"item_1_validator"}, id="include"), - param(None, {"item_1"}, {"item_2_validator"}, id="exclude"), - param(None, None, {"item_1_validator", "item_2_validator"}, id="empty_params"), - param({"item_1", "item_2"}, {"item_2"}, {"item_1_validator"}, id="intersection"), + pytest.param({"item_1"}, None, {"item_1_validator"}, id="include"), + pytest.param(None, {"item_1"}, {"item_2_validator"}, id="exclude"), + pytest.param(None, None, {"item_1_validator", "item_2_validator"}, id="empty_params"), + pytest.param({"item_1", "item_2"}, {"item_2"}, {"item_1_validator"}, id="intersection"), ], ) def test_extract_field_validators_args( self, - include: Set[str], - exclude: Set[str], - expected: Set[str], + exclude: set[str], + include: set[str], + expected: set[str], ): class ValidationSchema(BaseModel): item_1: str @@ -683,13 +934,10 @@ def item_1_validator(cls, v): def item_2_validator(cls, v): return v - validators = 
extract_field_validators( + validators = extract_validators( model=ValidationSchema, include_for_field_names=include, exclude_for_field_names=exclude, ) - validator_func_names = { - validator_item.__validator_config__[1].func.__name__ for validator_item in validators.values() - } - assert expected == validator_func_names + assert set(validators) == expected diff --git a/tests/test_atomic/conftest.py b/tests/test_atomic/conftest.py index 7be3e8b2..632ea41a 100644 --- a/tests/test_atomic/conftest.py +++ b/tests/test_atomic/conftest.py @@ -1,5 +1,7 @@ from __future__ import annotations +from collections.abc import Sequence + import pytest from fastapi_jsonapi.atomic.schemas import AtomicOperationAction @@ -10,6 +12,22 @@ def allowed_atomic_actions_list() -> list[str]: return [op.value for op in AtomicOperationAction] +def options_as_pydantic_choices_string(options: Sequence[str]) -> str: + if len(options) == 1: + return repr(options[0]) + return " or ".join( + ( + ", ".join(repr(op) for op in options[:-1]), + repr(options[-1]), + ), + ) + + +@pytest.fixture() +def atomic_operation_actions_as_str(): + return options_as_pydantic_choices_string([v.value for v in AtomicOperationAction]) + + @pytest.fixture() def allowed_atomic_actions_as_string(allowed_atomic_actions_list) -> str: - return ", ".join(repr(op) for op in allowed_atomic_actions_list) + return options_as_pydantic_choices_string(allowed_atomic_actions_list) diff --git a/tests/test_atomic/test_create_objects.py b/tests/test_atomic/test_create_objects.py index e8af132f..7f1ec793 100644 --- a/tests/test_atomic/test_create_objects.py +++ b/tests/test_atomic/test_create_objects.py @@ -1,16 +1,20 @@ import logging -from typing import Callable, Sequence +from typing import TYPE_CHECKING, Callable import pytest from httpx import AsyncClient -from pytest import mark # noqa -from sqlalchemy import and_, or_, select -from sqlalchemy.engine import Result +from sqlalchemy import ( + and_, + or_, + select, +) from 
sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import joinedload from sqlalchemy.sql.functions import count from starlette import status +from fastapi_jsonapi.atomic.atomic_handler import OPERATION_VALIDATION_ERROR_TEXT +from fastapi_jsonapi.views.view_base import ViewBase from tests.misc.utils import fake from tests.models import Child, Parent, ParentToChildAssociation, User, UserBio from tests.schemas import ( @@ -22,12 +26,18 @@ UserBioAttributesBaseSchema, ) +if TYPE_CHECKING: + from collections.abc import Sequence + + from sqlalchemy.engine import Result + COLUMN_CHARACTERS_LIMIT = 50 -pytestmark = mark.asyncio logging.basicConfig(level=logging.DEBUG) +logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO) + def random_sentence() -> str: return fake.sentence()[:COLUMN_CHARACTERS_LIMIT] @@ -45,9 +55,14 @@ async def test_operations_empty_list(self, client: AsyncClient): "detail": [ { "loc": ["body", "atomic:operations"], - "msg": "ensure this value has at least 1 items", - "type": "value_error.list.min_items", - "ctx": {"limit_value": 1}, + "input": [], + "msg": "List should have at least 1 item after validation, not 0", + "type": "too_short", + "ctx": { + "min_length": 1, + "field_type": "List", + "actual_length": 0, + }, }, ], } @@ -88,7 +103,7 @@ async def test_create_one_object( assert result == { "data": { "attributes": UserAttributesBaseSchema.model_validate(user_obj).model_dump(), - "id": str(user_obj.id), + "id": ViewBase.get_db_item_id(user_obj), "type": "user", }, } @@ -223,7 +238,7 @@ async def test_create_bio_with_relationship_to_user_to_one( "relationships": { "user": { "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, }, @@ -660,11 +675,9 @@ async def test_resource_type_with_local_id_not_found( "detail": { "data": { **action_2["data"], - "id": None, - "lid": None, }, "error": expected_error_text, - "message": f"Validation error on operation {action_1['op']}", + "message": 
OPERATION_VALIDATION_ERROR_TEXT.format(operation=action_1["op"]), "ref": None, }, } @@ -672,6 +685,7 @@ async def test_resource_type_with_local_id_not_found( user = await async_session.scalar(user_stmt) assert user is None + @pytest.mark.usefixtures("refresh_db") async def test_local_id_not_found( self, client: AsyncClient, @@ -758,11 +772,9 @@ async def test_local_id_not_found( "detail": { "data": { **action_2["data"], - "id": None, - "lid": None, }, "error": expected_error_text, - "message": f"Validation error on operation {action_2['op']}", + "message": OPERATION_VALIDATION_ERROR_TEXT.format(operation=action_2["op"]), "ref": None, }, } @@ -770,6 +782,7 @@ async def test_local_id_not_found( user = await async_session.scalar(user_stmt) assert user is None + @pytest.mark.usefixtures("refresh_db") async def test_create_and_associate_many_to_many( self, client: AsyncClient, @@ -900,24 +913,23 @@ async def test_create_object_schema_validation_error( response = await client.post("/operations", json=data_atomic_request) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text # TODO: json:api exception - assert response.json() == { - "detail": { - "data": { - **action_add["data"], - "id": None, - "lid": None, - "relationships": None, - }, - "errors": [ - { - "loc": ["data", "attributes", "name"], - "msg": "field required", - "type": "value_error.missing", - }, - ], - "message": f"Validation error on operation {action_add['op']}", - "ref": None, - }, + detail = response.json()["detail"] + errors = detail.pop("errors") + assert len(errors) == 1 + error = errors[0] + url: str = error.pop("url") + assert url.startswith("https://errors.pydantic.dev/"), url + assert url.endswith("/v/missing"), url + assert error == { + "input": {}, + "loc": ["data", "attributes", "name"], + "msg": "Field required", + "type": "missing", + } + assert detail == { + "data": action_add["data"], + "message": 
OPERATION_VALIDATION_ERROR_TEXT.format(operation=action_add["op"]), + "ref": None, } @pytest.mark.skip("not ready yet") diff --git a/tests/test_atomic/test_current_atomic_operation.py b/tests/test_atomic/test_current_atomic_operation.py index ab8fa20c..6813897f 100644 --- a/tests/test_atomic/test_current_atomic_operation.py +++ b/tests/test_atomic/test_current_atomic_operation.py @@ -1,13 +1,11 @@ from __future__ import annotations -from typing import ClassVar, Dict, Literal, Optional +from typing import TYPE_CHECKING, ClassVar, Literal import pytest from fastapi import Body, Depends, FastAPI, HTTPException, status from httpx import AsyncClient from pydantic import BaseModel -from pytest_asyncio import fixture -from sqlalchemy.ext.asyncio import AsyncSession from fastapi_jsonapi.atomic import current_atomic_operation from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric @@ -16,6 +14,7 @@ HTTPMethod, HTTPMethodConfig, ) +from fastapi_jsonapi.views.view_base import ViewBase from tests.common_user_api_test import ( BaseGenericUserCreateUpdateWithBodyDependency, CustomNameAttributesJSONAPI, @@ -30,19 +29,13 @@ UserSchema, ) -pytestmark = pytest.mark.asyncio +if TYPE_CHECKING: + from sqlalchemy.ext.asyncio import AsyncSession FIELD_CUSTOM_NAME = "custom_name" -# this one can be used only for generic views -# def get_custom_name_from_body_only_on_generic( -# data: CustomNameAttributesJSONAPI = Body(embed=True), -# ) -> str: -# return data.attributes.custom_name - - missing = object() @@ -51,6 +44,14 @@ class AttributesData(BaseModel): + """ + this one can be used only for generic views + def get_custom_name_from_body_only_on_generic( + data: CustomNameAttributesJSONAPI = Body(embed=True), + ) -> str: + return data.attributes.custom_name + """ + data: CustomNameAttributesJSONAPI @@ -60,31 +61,30 @@ class AttributesTopLevelBody(BaseModel): @handle_validation_error def get_validated_attribute_from_body(data: dict): - # # this will 
work ok, but `loc` in exception text will be `'loc': ['attributes', 'custom_name']` - # # and we need `'loc': ['body', 'data', 'attributes', 'custom_name']` - # validated_data = CustomNameAttributesJSONAPI.parse_obj(data) - # return validated_data.attributes.custom_name - + """ + Why do we create nested dicts? + This example will work: + `validated_data = CustomNameAttributesJSONAPI.parse_obj(data)` + `return validated_data.attributes.custom_name` + this will work ok, but `loc` in exception text will be `'loc': ['attributes', 'custom_name']` + and we need `'loc': ['body', 'data', 'attributes', 'custom_name']` + + :param data: + :return: + """ validated_data = AttributesTopLevelBody.model_validate({"body": {"data": data}}) - # or - # return get_custom_name_from_body_only_on_generic(data=validated_data) - # or return validated_data.body.data.attributes.custom_name async def get_custom_name_from_body_universal( - data: Optional[dict] = Body(None, embed=True), + data: dict | None = Body(None, embed=True), ) -> str: atomic_operation = current_atomic_operation.get(missing) if atomic_operation is missing: # example for same helper both for generic view and atomic view return get_validated_attribute_from_body(data) - # # use dependencies helper because it will raise corresponding errors - # dep_helper = DependencyHelper(request=request) - # return await dep_helper.run(get_custom_name_from_body_only_on_generic) - return get_validated_attribute_from_body(atomic_operation.data.model_dump()) @@ -127,7 +127,7 @@ class UserUpdateCustomDependency(ArbitraryModelBase): class UserCustomListView(ListViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { + method_dependencies: ClassVar[dict[HTTPMethod, HTTPMethodConfig]] = { HTTPMethod.ALL: HTTPMethodConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=common_handler, @@ -139,7 +139,7 @@ class UserCustomListView(ListViewBaseGeneric): class UserCustomDetailView(DetailViewBaseGeneric): - 
method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { + method_dependencies: ClassVar[dict[HTTPMethod, HTTPMethodConfig]] = { HTTPMethod.ALL: HTTPMethodConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=common_handler, @@ -162,7 +162,7 @@ def resource_type(self): @pytest.fixture(scope="class") def app_w_deps(self, resource_type): - app = build_app_custom( + return build_app_custom( model=User, schema=UserSchema, resource_type=resource_type, @@ -170,9 +170,8 @@ def app_w_deps(self, resource_type): class_detail=UserCustomDetailView, path=f"/path_{resource_type}", ) - return app - @fixture(scope="class") + @pytest.fixture() async def client(self, app_w_deps: FastAPI): async with AsyncClient(app=app_w_deps, base_url="http://test") as client: yield client @@ -277,21 +276,24 @@ async def test_atomic_create_user_error_required_body_field_not_passed( resource_type: str, user_attributes: UserAttributesBaseSchema, ): - user_attributes_data = user_attributes.model_dump() - assert self.FIELD_CUSTOM_NAME not in user_attributes_data + attributes_data = user_attributes.model_dump() + assert self.FIELD_CUSTOM_NAME not in attributes_data data_atomic_request = { "atomic:operations": [ { "op": "add", "data": { "type": resource_type, - "attributes": user_attributes_data, + "attributes": attributes_data, }, }, ], } response = await client.post("/operations", json=data_atomic_request) - self.validate_field_not_passed_response(response) + self.validate_field_not_passed_response( + response, + input_data=attributes_data, + ) async def test_atomic_update_user_error_required_body_field_not_passed( self, @@ -303,7 +305,7 @@ async def test_atomic_update_user_error_required_body_field_not_passed( attributes_data = user_attributes.model_dump() assert self.FIELD_CUSTOM_NAME not in attributes_data data_user_update = { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "type": resource_type, "attributes": attributes_data, } @@ -316,7 +318,10 @@ async def 
test_atomic_update_user_error_required_body_field_not_passed( ], } response = await client.post("/operations", json=data_atomic_request) - self.validate_field_not_passed_response(response) + self.validate_field_not_passed_response( + response, + input_data=attributes_data, + ) async def test_atomic_create_user_error_required_body_field_passed_but_invalid( self, @@ -351,7 +356,7 @@ async def test_atomic_update_user_error_required_body_field_passed_but_invalid( attributes_data = user_attributes.model_dump() attributes_data[self.FIELD_CUSTOM_NAME] = fake.word() data_user_update = { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "type": resource_type, "attributes": attributes_data, } diff --git a/tests/test_atomic/test_delete_objects.py b/tests/test_atomic/test_delete_objects.py index 4bcbc55e..e326ef20 100644 --- a/tests/test_atomic/test_delete_objects.py +++ b/tests/test_atomic/test_delete_objects.py @@ -1,8 +1,9 @@ import logging -from typing import Awaitable, Callable +from collections.abc import Awaitable +from typing import Callable +import pytest from httpx import AsyncClient -from pytest import mark # noqa from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.sql.functions import count @@ -11,7 +12,6 @@ from fastapi_jsonapi.atomic.schemas import AtomicOperationAction from tests.models import Computer -pytestmark = mark.asyncio logging.basicConfig(level=logging.DEBUG) diff --git a/tests/test_atomic/test_dependencies.py b/tests/test_atomic/test_dependencies.py index 19e00ca1..377079da 100644 --- a/tests/test_atomic/test_dependencies.py +++ b/tests/test_atomic/test_dependencies.py @@ -1,15 +1,15 @@ -from typing import ClassVar, Dict +from typing import ClassVar import pytest -from fastapi import Depends, Query, status +from fastapi import Depends, Query, status, FastAPI from httpx import AsyncClient -from pytest_asyncio import fixture from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, 
ListViewBaseGeneric from fastapi_jsonapi.views.utils import ( HTTPMethod, HTTPMethodConfig, ) +from fastapi_jsonapi.views.view_base import ViewBase from tests.fixtures.app import build_app_custom from tests.fixtures.views import ArbitraryModelBase, SessionDependency, common_handler from tests.misc.utils import fake @@ -21,8 +21,6 @@ UserSchema, ) -pytestmark = pytest.mark.asyncio - class CustomDependencyForCreate: KEY = "spam_create" @@ -58,7 +56,7 @@ class UserDeleteCustomDependency(ArbitraryModelBase): class UserCustomListView(ListViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { + method_dependencies: ClassVar[dict[HTTPMethod, HTTPMethodConfig]] = { HTTPMethod.ALL: HTTPMethodConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=common_handler, @@ -70,7 +68,7 @@ class UserCustomListView(ListViewBaseGeneric): class UserCustomDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { + method_dependencies: ClassVar[dict[HTTPMethod, HTTPMethodConfig]] = { HTTPMethod.ALL: HTTPMethodConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=common_handler, @@ -91,7 +89,7 @@ def resource_type(self): @pytest.fixture(scope="class") def app_w_deps(self, resource_type): - app = build_app_custom( + return build_app_custom( model=User, schema=UserSchema, schema_in_post=UserInSchema, @@ -100,10 +98,9 @@ def app_w_deps(self, resource_type): class_list=UserCustomListView, class_detail=UserCustomDetailView, ) - return app - @fixture(scope="class") - async def client(self, app_w_deps): + @pytest.fixture() + async def client(self, app_w_deps: FastAPI): async with AsyncClient(app=app_w_deps, base_url="http://test") as client: yield client @@ -145,9 +142,10 @@ async def test_on_create_atomic( expected_response_data = { "detail": [ { + "input": None, "loc": ["query", CustomDependencyForCreate.KEY], - "msg": "field required", - "type": "value_error.missing", + "msg": "Field 
required", + "type": "missing", }, ], } @@ -172,7 +170,7 @@ async def test_on_update_atomic( "atomic:operations": [ { "op": "update", - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "data": { "type": resource_type, "attributes": user.model_dump(), @@ -183,9 +181,10 @@ async def test_on_update_atomic( expected_response_data = { "detail": [ { + "input": None, "loc": ["query", CustomDependencyForUpdate.KEY], - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, ], } @@ -206,7 +205,7 @@ async def test_on_delete_atomic( { "op": "remove", "ref": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "type": resource_type, }, }, @@ -216,9 +215,10 @@ async def test_on_delete_atomic( expected_response_data = { "detail": [ { + "input": None, "loc": ["query", CustomDependencyForDelete.KEY], - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, ], } diff --git a/tests/test_atomic/test_mixed_atomic.py b/tests/test_atomic/test_mixed_atomic.py index e2ea113d..45e66eca 100644 --- a/tests/test_atomic/test_mixed_atomic.py +++ b/tests/test_atomic/test_mixed_atomic.py @@ -1,21 +1,27 @@ from __future__ import annotations import logging -from typing import Awaitable, Callable +from typing import ( + TYPE_CHECKING, + Callable, +) import pytest -from httpx import AsyncClient -from pytest import mark # noqa from sqlalchemy import select -from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.sql.functions import count from starlette import status +from fastapi_jsonapi.views.view_base import ViewBase from tests.misc.utils import fake from tests.models import Computer, User, UserBio from tests.schemas import ComputerAttributesBaseSchema, UserAttributesBaseSchema, UserBioAttributesBaseSchema -pytestmark = mark.asyncio +if TYPE_CHECKING: + from collections.abc import Awaitable + + from httpx import AsyncClient + from sqlalchemy.ext.asyncio import 
AsyncSession + logging.basicConfig(level=logging.DEBUG) @@ -24,8 +30,7 @@ class TestAtomicMixedActions: async def test_schema_validation_error( self, client: AsyncClient, - allowed_atomic_actions_list: list[str], - allowed_atomic_actions_as_string: str, + atomic_operation_actions_as_str: str, ): operation_name = fake.word() atomic_request_data = { @@ -47,15 +52,14 @@ async def test_schema_validation_error( response_data = response.json() assert response_data == { - # TODO: jsonapi exception? + # TODO: raise jsonapi exception? "detail": [ { + "ctx": {"expected": atomic_operation_actions_as_str}, + "input": operation_name, "loc": ["body", "atomic:operations", 0, "op"], - "msg": f"value is not a valid enumeration member; permitted: {allowed_atomic_actions_as_string}", - "type": "type_error.enum", - "ctx": { - "enum_values": allowed_atomic_actions_list, - }, + "msg": f"Input should be {atomic_operation_actions_as_str}", + "type": "enum", }, ], } @@ -99,7 +103,7 @@ async def test_create_and_update_atomic_success( "relationships": { "user": { "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, }, @@ -109,7 +113,7 @@ async def test_create_and_update_atomic_success( { "op": "update", "data": { - "id": str(user_1_bio.id), + "id": ViewBase.get_db_item_id(user_1_bio), "type": "user_bio", "attributes": user_bio_data.model_dump(), }, @@ -117,7 +121,7 @@ async def test_create_and_update_atomic_success( { "op": "update", "data": { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", "attributes": user_data.model_dump(), }, @@ -138,7 +142,7 @@ async def test_create_and_update_atomic_success( assert results == [ { "data": { - "id": str(computer.id), + "id": ViewBase.get_db_item_id(computer), "type": "computer", "attributes": new_computer.model_dump(), }, @@ -146,7 +150,7 @@ async def test_create_and_update_atomic_success( }, { "data": { - "id": str(user_1_bio.id), + "id": ViewBase.get_db_item_id(user_1_bio), "type": 
"user_bio", "attributes": user_bio_data.model_dump(), }, @@ -154,7 +158,7 @@ async def test_create_and_update_atomic_success( }, { "data": { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", "attributes": user_data.model_dump(), }, @@ -205,7 +209,7 @@ async def test_create_and_update_atomic_rollback( "relationships": { "user": { "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, }, @@ -215,7 +219,7 @@ async def test_create_and_update_atomic_rollback( { "op": "update", "data": { - "id": str(user_1_bio.id), + "id": ViewBase.get_db_item_id(user_1_bio), "type": "user_bio", "attributes": user_bio_data.model_dump(), }, @@ -223,7 +227,7 @@ async def test_create_and_update_atomic_rollback( { "op": "update", "data": { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", "attributes": user_data.model_dump(), }, @@ -253,7 +257,7 @@ async def test_create_and_update_atomic_rollback( "status_code": status.HTTP_400_BAD_REQUEST, "title": "Bad Request", "meta": { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, }, @@ -304,7 +308,7 @@ async def test_create_update_and_delete_atomic_success( "relationships": { "user": { "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, }, @@ -314,7 +318,7 @@ async def test_create_update_and_delete_atomic_success( { "op": "update", "data": { - "id": user_1_bio.id, + "id": ViewBase.get_db_item_id(user_1_bio), "type": "user_bio", "attributes": user_bio_data.model_dump(), }, @@ -322,7 +326,7 @@ async def test_create_update_and_delete_atomic_success( { "op": "update", "data": { - "id": user_1.id, + "id": ViewBase.get_db_item_id(user_1), "type": "user", "attributes": user_data.model_dump(), }, @@ -330,7 +334,7 @@ async def test_create_update_and_delete_atomic_success( { "op": "remove", "ref": { - "id": computer.id, + "id": ViewBase.get_db_item_id(computer), "type": "computer", }, }, @@ 
-353,7 +357,7 @@ async def test_create_update_and_delete_atomic_success( assert results == [ { "data": { - "id": str(computer.id), + "id": ViewBase.get_db_item_id(computer), "type": "computer", "attributes": new_computer.model_dump(), }, @@ -361,7 +365,7 @@ async def test_create_update_and_delete_atomic_success( }, { "data": { - "id": str(user_1_bio.id), + "id": ViewBase.get_db_item_id(user_1_bio), "type": "user_bio", "attributes": user_bio_data.model_dump(), }, @@ -369,7 +373,7 @@ async def test_create_update_and_delete_atomic_success( }, { "data": { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", "attributes": user_data.model_dump(), }, @@ -378,7 +382,6 @@ async def test_create_update_and_delete_atomic_success( # https://jsonapi.org/ext/atomic/#result-objects # An empty result object ({}) is acceptable for operations that are not required to return data. # TODO: An empty result object ({}) - # {}, { "data": None, "meta": None, @@ -430,7 +433,7 @@ async def test_create_user_and_update_computer_and_link_to_user( { "op": "update", "data": { - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), "type": "computer", "attributes": computer_update.model_dump(), "relationships": { @@ -471,7 +474,7 @@ async def test_create_user_and_update_computer_and_link_to_user( assert results == [ { "data": { - "id": str(user.id), + "id": ViewBase.get_db_item_id(user), "type": "user", "attributes": user_create.model_dump(), }, @@ -479,7 +482,7 @@ async def test_create_user_and_update_computer_and_link_to_user( }, { "data": { - "id": str(computer_1.id), + "id": ViewBase.get_db_item_id(computer_1), "type": "computer", "attributes": computer_update.model_dump(), }, @@ -519,7 +522,7 @@ async def test_create_user_and_link_computer_one_operation( "computers": { "data": [ { - "id": computer_1.id, + "id": ViewBase.get_db_item_id(computer_1), "type": "computer", }, ], @@ -554,7 +557,7 @@ async def 
test_create_user_and_link_computer_one_operation( assert results == [ { "data": { - "id": str(new_user.id), + "id": ViewBase.get_db_item_id(new_user), "type": "user", "attributes": user_create.model_dump(), }, @@ -565,8 +568,6 @@ async def test_create_user_and_link_computer_one_operation( @pytest.mark.skip("todo: create relationships resources") async def create_user_and_link_existing_computer_to_user( self, - client: AsyncClient, - async_session: AsyncSession, computer_1: Computer, ): """ diff --git a/tests/test_atomic/test_request.py b/tests/test_atomic/test_request.py index af86a93f..bc9da836 100644 --- a/tests/test_atomic/test_request.py +++ b/tests/test_atomic/test_request.py @@ -106,7 +106,4 @@ def test_not_supported_operation( AtomicOperationRequest.model_validate(atomic_request_data) errors = exc_info.value.errors() error = errors[0] - assert ( - error.get("msg") - == f"value is not a valid enumeration member; permitted: {allowed_atomic_actions_as_string}" - ) + assert error.get("msg") == f"Input should be {allowed_atomic_actions_as_string}" diff --git a/tests/test_atomic/test_update_objects.py b/tests/test_atomic/test_update_objects.py index 60119472..c81ac29d 100644 --- a/tests/test_atomic/test_update_objects.py +++ b/tests/test_atomic/test_update_objects.py @@ -5,11 +5,13 @@ from sqlalchemy.ext.asyncio import AsyncSession from starlette import status +from fastapi_jsonapi.views.view_base import ViewBase from tests.misc.utils import fake from tests.models import Computer, User, UserBio -from tests.schemas import UserAttributesBaseSchema, UserBioAttributesBaseSchema - -pytestmark = pytest.mark.asyncio +from tests.schemas import ( + UserAttributesBaseSchema, + UserBioAttributesBaseSchema, +) logging.basicConfig(level=logging.DEBUG) @@ -33,7 +35,7 @@ async def test_update_two_objects( { "op": "update", "data": { - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", "attributes": user_data.model_dump(), }, @@ -41,7 +43,7 @@ async 
def test_update_two_objects( { "op": "update", "data": { - "id": str(user_1_bio.id), + "id": ViewBase.get_db_item_id(user_1_bio), "type": "user_bio", "attributes": user_bio_data.model_dump(), }, @@ -63,7 +65,7 @@ async def test_update_two_objects( { "data": { "attributes": user_data.model_dump(), - "id": str(user_1.id), + "id": ViewBase.get_db_item_id(user_1), "type": "user", }, "meta": None, @@ -71,7 +73,7 @@ async def test_update_two_objects( { "data": { "attributes": user_bio_data.model_dump(), - "id": str(user_1_bio.id), + "id": ViewBase.get_db_item_id(user_1_bio), "type": "user_bio", }, "meta": None, diff --git a/tests/test_data_layers/test_filtering/test_sqlalchemy.py b/tests/test_data_layers/test_filtering/test_sqlalchemy.py index 1c3c0b51..ea69a21c 100644 --- a/tests/test_data_layers/test_filtering/test_sqlalchemy.py +++ b/tests/test_data_layers/test_filtering/test_sqlalchemy.py @@ -1,9 +1,9 @@ from typing import Any from unittest.mock import MagicMock, Mock +import pytest from fastapi import status from pydantic import BaseModel, ConfigDict -from pytest import raises # noqa PT013 from fastapi_jsonapi.data_layers.filtering.sqlalchemy import ( build_filter_expression, @@ -23,8 +23,11 @@ class ModelSchema(BaseModel): model_column_mock = MagicMock() + # field name for model ModelSchema + field_name = "value" build_filter_expression( - schema_field=ModelSchema.model_fields["value"], + field_name=field_name, + schema_field=ModelSchema.model_fields[field_name], model_column=model_column_mock, operator="__eq__", value=Any, @@ -37,7 +40,7 @@ class ModelSchema(BaseModel): def test_user_type_cast_fail(self): class UserType: - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs): # noqa: ARG002 msg = "Cast failed" raise ValueError(msg) @@ -45,9 +48,12 @@ class ModelSchema(BaseModel): user_type: UserType model_config = ConfigDict(arbitrary_types_allowed=True) - with raises(InvalidType) as exc_info: + # field name for model ModelSchema + field_name 
= "user_type" + with pytest.raises(InvalidType) as exc_info: build_filter_expression( - schema_field=ModelSchema.model_fields["user_type"], + field_name=field_name, + schema_field=ModelSchema.model_fields[field_name], model_column=Mock(), operator=Mock(), value=Any, diff --git a/tests/test_fastapi_jsonapi/test_querystring.py b/tests/test_fastapi_jsonapi/test_querystring.py index dec04365..4eac4d69 100644 --- a/tests/test_fastapi_jsonapi/test_querystring.py +++ b/tests/test_fastapi_jsonapi/test_querystring.py @@ -14,11 +14,11 @@ def test__extract_item_key(): manager = QueryStringManager(MagicMock()) key = "fields[user]" - assert manager._extract_item_key(key) == "user" + assert manager.extract_item_key(key) == "user" with pytest.raises(BadRequest) as exc_info: # noqa: PT012 key = "fields[user" - manager._extract_item_key(key) + manager.extract_item_key(key) assert exc_info.value.status_code == status.HTTP_400_BAD_REQUEST assert exc_info.value.detail == { @@ -46,7 +46,7 @@ def test_filters__errors(): manager = QueryStringManager(request) with pytest.raises(InvalidFilters) as exc_info: - manager.filters + assert isinstance(manager.filters, list) assert exc_info.value.status_code == status.HTTP_400_BAD_REQUEST assert exc_info.value.detail == { @@ -77,7 +77,7 @@ def test_filters__errors(): manager = QueryStringManager(request) with pytest.raises(InvalidFilters) as exc_info: - manager.filters + assert isinstance(manager.filters, list) assert exc_info.value.status_code == status.HTTP_400_BAD_REQUEST assert exc_info.value.detail == { diff --git a/tests/test_utils/test_dependency_helper.py b/tests/test_utils/test_dependency_helper.py index e8ba5e93..59a5ef9e 100644 --- a/tests/test_utils/test_dependency_helper.py +++ b/tests/test_utils/test_dependency_helper.py @@ -2,7 +2,6 @@ from string import ascii_letters from unittest.mock import AsyncMock -import pytest from fastapi import ( Depends, Request, @@ -10,8 +9,6 @@ from fastapi_jsonapi.utils.dependency_helper import 
DependencyHelper -pytestmark = pytest.mark.asyncio - class TestDependencyHelper: async def test_dependency_helper(self):