diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0602adef..10453dac 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,7 +5,7 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-## [2.4.4] - 2025-11-??
+## [2.4.4] - 2025-11-15 :mount_fuji:
 
 - Introduce `MiddlewareList` and `MiddlewareCategory` to simplify middleware
   management and ordering of middlewares (see [#620](https://github.com/Neoteroi/BlackSheep/issues/620)).
@@ -29,15 +29,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
   when mapping to user-defined dataclasses, Pydantic v2 models, or classes (see
   [#614](https://github.com/Neoteroi/BlackSheep/issues/614)). Previously, extra
   properties were not ignored by default and required the user to explicitly code their
-  input classes to allow extra properties.
-  This is also done for expected input body declared as `list[T]`, `Sequence[T]`, and
-  `tuple[T]` where `T` is a dataclass, Pydantic model, or plain class.
-  The user can still control how input bodies from clients are converted using custom
-  binders or altering `blacksheep.server.bindings.class_converters`.
-  **Note:** automatic type conversion from strings is not performed for object properties.
-  Use Pydantic models if you want this feature. Example: dates can require conversion
-  when mapping JSON input, and everything is transmitted as text when using multipart
-  form data.
+  input classes to allow extra properties. This is also done for sub-properties, lists,
+  and dictionaries. The user can still control exactly how input bodies from clients are
+  converted using custom binders or by altering
+  `blacksheep.server.bindings.class_converters`.
 - Add support for specifying OpenAPI tags for controllers. This simplifies handling
   tags for documentation ([#616](https://github.com/Neoteroi/BlackSheep/issues/616)).
 - Improve the build matrix to build wheels for `arm64` architecture for Linux and
@@ -56,14 +51,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Attach `EnvironmentSettings` to the `Application` object for runtime inspection,
   which is useful for: transparency and debugging, testing
   (`assert app.env_settings.force_https is True`), health check endpoints or admin tools
-  can expose configuration:
-
-```python
-@get("/health/config")
-async def config_info():
-    return {"env": app.env_settings.env, ...}
-```
-
+  can expose configuration.
 - Add `HTTPSchemeMiddleware` to set request scheme when running behind reverse
   proxies or load balancers with TLS termination. See
   [#631](https://github.com/Neoteroi/BlackSheep/issues/631).
@@ -82,6 +70,9 @@ async def config_info():
 - Improve `OpenIDSettings`, `CookieAuthentication`, and `AntiForgeryHandler` to handle
   secrets using the `Secret` class from `essentials.secrets`. Passing secrets as `str`
   directly issues a deprecation warning and won't be supported in `2.5.x` or `2.6.x`.
+- Add support for annotated types in `OpenAPIHandler` return types, by @tyzhnenko. This
+  feature is important to support automatic generation of OpenAPI Documentation when
+  returning instances of `Response` (e.g. `Annotated[Response, ProductDetails]`).
 
 ## [2.4.3] - 2025-10-19 :musical_keyboard:
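To make the 2.4.4 binding note above concrete, here is a minimal sketch of the behavior it describes. The `CreateProductInput` model and the route are illustrative only and are not part of this change set; they assume the default JSON binder.

```python
from dataclasses import dataclass

from blacksheep import Application, FromJSON, json

app = Application()


@dataclass
class CreateProductInput:
    name: str
    price: float


@app.router.post("/products")
async def create_product(data: FromJSON[CreateProductInput]):
    # A payload such as {"name": "Mug", "price": 9.5, "unexpected": 1} now binds
    # successfully: the "unexpected" key is ignored instead of causing an error.
    return json({"name": data.value.name, "price": data.value.price})
```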
diff --git a/blacksheep/__init__.py b/blacksheep/__init__.py
index cf53f739..e0b09276 100644
--- a/blacksheep/__init__.py
+++ b/blacksheep/__init__.py
@@ -4,7 +4,7 @@
 """
 
 __author__ = "Roberto Prevato "
-__version__ = "2.4.4a2"
+__version__ = "2.4.4"
 
 from .contents import Content as Content
 from .contents import FormContent as FormContent
diff --git a/blacksheep/server/bindings/__init__.py b/blacksheep/server/bindings/__init__.py
index 0a2e0f11..45037152 100644
--- a/blacksheep/server/bindings/__init__.py
+++ b/blacksheep/server/bindings/__init__.py
@@ -383,13 +383,6 @@ def __init__(self, expected_type, binder_type):
         )
 
 
-def _try_get_type_name(expected_type) -> str:
-    try:
-        return expected_type.__name__
-    except AttributeError:  # pragma: no cover
-        return expected_type
-
-
 def get_default_class_converter(expected_type):
     for converter in class_converters:
         if converter.can_convert(expected_type):
diff --git a/blacksheep/server/bindings/converters.py b/blacksheep/server/bindings/converters.py
index b447d225..3baca034 100644
--- a/blacksheep/server/bindings/converters.py
+++ b/blacksheep/server/bindings/converters.py
@@ -13,7 +13,17 @@
 from collections.abc import Sequence as SequenceABC
 from dataclasses import fields, is_dataclass
 from datetime import date, datetime
-from typing import Any, Callable, List, Literal, Sequence, get_args, get_origin
+from functools import lru_cache
+from typing import (
+    Any,
+    Callable,
+    List,
+    Literal,
+    Sequence,
+    get_args,
+    get_origin,
+    get_type_hints,
+)
 from urllib.parse import unquote
 from uuid import UUID
 
@@ -56,7 +66,11 @@ def can_convert(self, expected_type) -> bool:
         return expected_type is str or str(expected_type) == "~T"
 
     def convert(self, value, expected_type) -> Any:
-        return unquote(value) if value else None
+        if value is None:
+            return None
+        if "%" in value:
+            return unquote(value)
+        return value
 
 
 class InitTypeConverter(TypeConverter):
@@ -215,123 +229,208 @@ def convert(self, value, expected_type) -> Any:
         raise ValueError(f"{value!r} is not a valid {expected_type}")
 
 
-class ClassConverter(TypeConverter):
-    def __init__(self):
-        self._valid_params_cache = {}
-
+class _AsIsConverter(TypeConverter):
     def can_convert(self, expected_type) -> bool:
-        # Must be a class
-        if not inspect.isclass(expected_type):
-            return False
+        return True
 
-        # Exclude built-in types
-        if expected_type.__module__ == "builtins":
-            return False
+    def convert(self, value, expected_type) -> Any:
+        return value
 
-        # Exclude typing module generics
-        if get_origin(expected_type) is not None:
-            return False
 
-        # Exclude types that already have specific converters
-        # (this prevents conflicts with existing converters)
-        for converter in converters:
-            if converter != self and converter.can_convert(expected_type):
-                return False
+_as_is_converter = _AsIsConverter()
 
-        # Must have a callable __init__ method
-        if not hasattr(expected_type, "__init__"):
-            return False
 
-        return True
-
-    def _get_valid_params(self, expected_type):
-        # Check cache first
-        if expected_type not in self._valid_params_cache:
-            # Filter out unknown parameters to ignore extra properties
-            try:
-                sig = inspect.signature(expected_type.__init__)
-                valid_params = set(sig.parameters.keys()) - {"self"}
-                self._valid_params_cache[expected_type] = valid_params
-            except (ValueError, TypeError):
-                # Cache None to indicate fallback behavior
-                self._valid_params_cache[expected_type] = None
+class DictConverter(TypeConverter):
+    """
+    Converter for dict[K, V] where both K and V can be handled by converters.
+    Supports string keys and hashable class keys.
+    """
 
-        return self._valid_params_cache[expected_type]
+    def can_convert(self, expected_type) -> bool:
+        return _get_origin(expected_type) is dict
 
     def convert(self, value, expected_type) -> Any:
         if value is None:
             return None
 
-        if isinstance(value, dict):
-            valid_params = self._get_valid_params(expected_type)
+        key_type, value_type = _get_args(expected_type)
 
-            if valid_params is not None:
-                filtered_data = {k: v for k, v in value.items() if k in valid_params}
-                return expected_type(**filtered_data)
-            else:
-                # Fallback if signature inspection failed
-                return expected_type(**value)
-        else:
-            # Try to construct from single value
-            return expected_type(value)
+        # Find the appropriate converters
+        key_converter = get_converter(key_type)
+        value_converter = get_converter(value_type)
 
+        return {
+            key_converter.convert(k, key_type): value_converter.convert(v, value_type)
+            for k, v in value.items()
+        }
 
-class DataClassConverter(TypeConverter):
-    def __init__(self):
-        self._valid_fields_cache = {}
 
-    def can_convert(self, expected_type) -> bool:
-        return is_dataclass(expected_type) and inspect.isclass(expected_type)
+_dict_converter = DictConverter()
 
-    def _get_valid_fields(self, expected_type):
-        # Check cache first
-        if expected_type not in self._valid_fields_cache:
-            try:
-                field_names = {field.name for field in fields(expected_type)}
-                self._valid_fields_cache[expected_type] = field_names
-            except (ValueError, TypeError):
-                # Cache None to indicate fallback behavior
-                self._valid_fields_cache[expected_type] = None
 
-        return self._valid_fields_cache[expected_type]
+@lru_cache(maxsize=None)
+def _get_signature(cls):
+    return inspect.signature(cls.__init__)
 
-    def convert(self, value, expected_type) -> Any:
-        if value is None:
-            return None
 
-        if isinstance(value, dict):
-            valid_fields = self._get_valid_fields(expected_type)
+@lru_cache(maxsize=None)
+def _get_type_hints(cls):
+    return get_type_hints(cls.__init__)
+
+
+@lru_cache(maxsize=None)
+def _get_dataclass_fields(cls):
+    return fields(cls)
+
+
+@lru_cache(maxsize=None)
+def _is_pydantic_model(cls):
+    return BaseModel is not None and inspect.isclass(cls) and issubclass(cls, BaseModel)
 
-            if valid_fields is not None:
-                filtered_data = {k: v for k, v in value.items() if k in valid_fields}
-                return expected_type(**filtered_data)
-            else:
-                # Fallback if field inspection failed
-                return expected_type(**value)
-        else:
-            # Try to construct from single value
-            return expected_type(value)
+@lru_cache(maxsize=None)
+def _get_args(cls):
+    return get_args(cls)
 
-class PydanticConverter(TypeConverter):
+
+@lru_cache(maxsize=None)
+def _get_origin(cls):
+    return get_origin(cls)
+
+
+@lru_cache(maxsize=None)
+def _is_dataclass(cls):
+    return is_dataclass(cls)
+
+
+class ClassConverter(TypeConverter):
+    """
+    Converts dictionaries to instances of desired types, supporting Pydantic models,
+    Python dataclasses, or plain user-defined classes.
+
+    This converter handles common scenarios for basic type conversion from dictionaries
+    to class instances. It supports:
+    - Dataclasses with simple field types
+    - Plain classes with __init__ parameters
+    - Nested classes and dataclasses
+
+    Important limitations:
+    - This converter is NOT designed to support all possible type conversion scenarios
+    - It handles only straightforward cases with basic type annotations
+    - Complex type validation, nested generics, and advanced typing constructs
+      are not fully supported
+
+    For complex type conversion scenarios, use:
+    - Pydantic models (recommended): Provides comprehensive validation,
+      advanced typing support, and better error messages
+    - Custom type converters: Define explicit conversion logic for your specific needs
+    - Explicit conversion in your input type classes
+
+    This converter ignores extra fields in the input dictionary that don't match
+    class parameters or dataclass fields.
+    """
+
+    def _from_dict(self, cls, data: dict | list):
+        """Convert dict to plain class or dataclass, ignoring extra fields"""
+
+        if _is_pydantic_model(cls):
+            return cls.model_validate(data)
+
+        # here it is sufficient to handle list because input from client can only
+        # be parsed as list in most cases (like after parsing JSON or XML), not other
+        # types of sequences like tuple
+        if isinstance(data, list):
+            # require a type hint to work
+            obj_type_hint = _get_args(cls)
+            if obj_type_hint:
+                # a type argument is available: convert each item accordingly
+                obj_type_converter = get_converter(obj_type_hint[0])
+                return [
+                    obj_type_converter.convert(datum, obj_type_hint[0])
+                    for datum in data
+                ]
+            else:
+                # return data as-is (let it fail downstream if it must)
+                return data
+
+        if _dict_converter.can_convert(cls):
+            return _dict_converter.convert(data, cls)
+
+        if not isinstance(data, dict):
+            return data
+
+        # Handle dataclasses
+        if _is_dataclass(cls):
+            return self._handle_dataclass(cls, data)
+
+        # Handle plain classes
+        return self._handle_plain_class(cls, data)
+
+    def _handle_dataclass(self, cls, data):
+        field_values = {}
+        for field in _get_dataclass_fields(cls):
+            if field.name in data:
+                field_type = field.type
+                value = data[field.name]
+                if value is None:
+                    field_values[field.name] = None
+                else:
+                    converter = get_converter(field_type)
+                    field_values[field.name] = converter.convert(value, field_type)
+        return cls(**field_values)
+
+    def _handle_plain_class(self, cls, data):
+        # Get type hints from __init__
+        sig = _get_signature(cls)
+        type_hints = _get_type_hints(cls)
+
+        init_params = {}
+        for param_name, _ in sig.parameters.items():
+            if param_name == "self":
+                continue
+
+            if param_name in data:
+                value = data[param_name]
+
+                # Check if parameter has a type hint that's a class
+                if param_name in type_hints:
+                    param_type = type_hints[param_name]
+                    converter = get_converter(param_type)
+                    init_params[param_name] = converter.convert(value, param_type)
+                else:
+                    init_params[param_name] = value
+        return cls(**init_params)
+
+    @lru_cache(maxsize=None)
     def can_convert(self, expected_type) -> bool:
-        if BaseModel is None:
+        # Must be a class
+        if not inspect.isclass(expected_type):
+            return False
+
+        if is_dataclass(expected_type) or _is_pydantic_model(expected_type):
+            return True
+
+        # Exclude built-in types
+        if expected_type.__module__ == "builtins":
             return False
-        return inspect.isclass(expected_type) and issubclass(expected_type, BaseModel)
+
+        # Exclude typing module generics
+        if get_origin(expected_type) is not None:
+            return False
+
+        # Must have a callable __init__ method
+        if not hasattr(expected_type, "__init__"):
+            return False
+
+        return True
 
     def convert(self, value, expected_type) -> Any:
         if value is None:
             return None
-
-        if isinstance(value, dict):
-            # Use Pydantic's model_validate for proper validation and conversion
-            return expected_type.model_validate(value)
-        else:
-            # Try to construct from single value
-            return expected_type.model_validate(value)
+        return self._from_dict(expected_type, value)
 
 
-# Add this new converter class after the existing converters
 class ListConverter(TypeConverter):
     """
     Converter for list[T], Sequence[T], and tuple[T] where T
@@ -341,60 +440,36 @@ class ListConverter(TypeConverter):
     def __init__(self, supported_origins=None):
        if supported_origins is None:
             supported_origins = {list, List, Sequence, SequenceABC, tuple}
-        self.supported_origins = supported_origins
+        self.supported_origins = frozenset(supported_origins)
 
     def can_convert(self, expected_type) -> bool:
-        origin = get_origin(expected_type)
+        origin = _get_origin(expected_type)
         if origin not in self.supported_origins:
             return False
 
         # Get the item type
-        args = get_args(expected_type)
+        args = _get_args(expected_type)
         if not args:
             return False
 
-        item_type = args[0]
-
-        # Check if any class converter can handle the item type
-        for converter in class_converters:
-            if converter.can_convert(item_type):
-                return True
-
-        return False
+        return True
 
     def convert(self, value, expected_type) -> Any:
         if value is None:
             return None
 
-        if not isinstance(value, list):
-            raise ValueError(f"Expected a list for {expected_type}, got {type(value)}")
+        origin = _get_origin(expected_type)
+        item_type = _get_args(expected_type)[0]
 
-        origin = get_origin(expected_type)
-        item_type = get_args(expected_type)[0]
+        item_converter = get_converter(item_type)
 
-        # Find the appropriate converter for the item type
-        item_converter = None
-        for converter in class_converters:
-            if converter.can_convert(item_type):
-                item_converter = converter
-                break
+        converted_items = [item_converter.convert(item, item_type) for item in value]
 
-        if item_converter is None:
-            raise ValueError(f"No converter found for item type {item_type}")
-
-        # Convert each item in the list
-        converted_items = []
-        for item in value:
-            converted_item = item_converter.convert(item, item_type)
-            converted_items.append(converted_item)
-
-        # Return the appropriate collection type
         if origin in {list, List}:
             return converted_items
-        elif origin in {tuple}:
+        elif origin is tuple:
             return tuple(converted_items)
         else:
-            # For Sequence and other abstract types, default to list
             return converted_items
 
@@ -418,11 +493,17 @@ def convert(self, value, expected_type) -> Any:
 
 
 class_converters: list[TypeConverter] = [
-    DataClassConverter(),
     ClassConverter(),
     ListConverter(),
+    DictConverter(),
 ]
 
-if BaseModel is not None:
-    # Insert PydanticConverter before ClassConverter to give it priority
-    class_converters.insert(-2, PydanticConverter())
+
+@lru_cache(maxsize=None)
+def get_converter(cls) -> TypeConverter:
+    _all_converters = converters + class_converters
+
+    for converter in _all_converters:
+        if converter.can_convert(cls):
+            return converter
+    return _as_is_converter
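As a usage note for the converters above, the following sketch shows how an application could register its own converter by altering `class_converters`, which is the customization point named in the changelog entry. The `IPv4Converter` is hypothetical, and the import path `blacksheep.server.bindings.converters` is an assumption based on this diff.

```python
from ipaddress import IPv4Address, ip_address

# Assumed import path, based on the module changed in this diff.
from blacksheep.server.bindings.converters import TypeConverter, class_converters


class IPv4Converter(TypeConverter):
    """Converts string values from request bodies into IPv4Address objects."""

    def can_convert(self, expected_type) -> bool:
        return expected_type is IPv4Address

    def convert(self, value, expected_type):
        if value is None:
            return None
        return ip_address(value)


# Converters are checked in order, so prepend the custom converter to give it
# precedence over the built-in ClassConverter.
class_converters.insert(0, IPv4Converter())
```

Since `get_converter` caches its lookups with `lru_cache`, custom converters are best registered at import or startup time, before any request is handled.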
diff --git a/blacksheep/server/openapi/v3.py b/blacksheep/server/openapi/v3.py
index a66cd931..67199eb1 100644
--- a/blacksheep/server/openapi/v3.py
+++ b/blacksheep/server/openapi/v3.py
@@ -7,16 +7,13 @@
 from dataclasses import dataclass, fields, is_dataclass
 from datetime import date, datetime
 from enum import Enum, IntEnum
-from types import UnionType, GenericAlias as GenericAlias_
-from typing import Any, Dict, Iterable, Sequence, Type, Union, TypeAlias
+from types import GenericAlias as GenericAlias_
+from types import UnionType
+from typing import Any, Dict, Iterable, Sequence, Type, TypeAlias, Union
 from typing import _AnnotatedAlias as AnnotatedAlias
-from typing import _GenericAlias
-from typing import get_type_hints
+from typing import _GenericAlias, get_type_hints
 from uuid import UUID
 
-# Alias to support both typing List[T]/list[T] as GenericAlias
-GenericAlias: TypeAlias = _GenericAlias | GenericAlias_
-
 from guardpost import AuthenticationHandler
 from guardpost.common import AuthenticatedRequirement
 from openapidocs.common import Format, Serializer
@@ -108,6 +105,10 @@ class JWTBearerAuthentication:
     is_pydantic_dataclass = ...
 
 
+# Alias to support both typing List[T]/list[T] as GenericAlias
+GenericAlias: TypeAlias = _GenericAlias | GenericAlias_
+
+
 def _is_union_type(annotation):
     if isinstance(annotation, UnionType):  # type: ignore
         return True
@@ -557,7 +558,8 @@ def get_type_name_for_annotated(
         origin = args[0] if args else get_origin(object_type)
         annotations = object_type.__metadata__
         annotations_repr = "And".join(
-            self.get_type_name(annotation, context_type_args) for annotation in annotations
+            self.get_type_name(annotation, context_type_args)
+            for annotation in annotations
         )
         return f"{self.get_type_name(origin)}Of{annotations_repr}"
 
@@ -593,7 +595,8 @@ def get_type_name(
             return self.get_type_name_for_annotated(object_type, context_type_args)
         if isinstance(object_type, GenericAlias) or isinstance(object_type, UnionType):
             return self.get_type_name_for_generic(object_type, context_type_args)
-        # Workaround for built-in collection types in Python 3.9+ to have them capitalized in schema names
+        # Workaround for built-in collection types in Python 3.9+ to have them
+        # capitalized in schema names
        if object_type in (list, tuple, set, dict) and hasattr(object_type, "__name__"):
             return object_type.__name__.capitalize()
         if hasattr(object_type, "__name__"):
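The OpenAPI changes above are what enable return types such as `Annotated[Response, ProductDetails]` mentioned in the changelog. A sketch of how a handler might use this, assuming a hypothetical `ProductDetails` model and route that are not part of this diff:

```python
from dataclasses import dataclass
from typing import Annotated

from blacksheep import Application, Response, json
from blacksheep.server.openapi.v3 import OpenAPIHandler
from openapidocs.v3 import Info

app = Application()
docs = OpenAPIHandler(info=Info(title="Example API", version="0.0.1"))
docs.bind_app(app)


@dataclass
class ProductDetails:
    id: int
    name: str


@app.router.get("/products/{product_id}")
async def get_product(product_id: int) -> Annotated[Response, ProductDetails]:
    # The handler returns a Response, while the Annotated metadata tells the
    # OpenAPIHandler to document a ProductDetails body for the response.
    return json(ProductDetails(id=product_id, name="Example"))
```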
client_id="067cee45-faf3-4c75-9fef-09f050bcc3ae", - client_secret="JUST_AN_EXAMPLE", + client_secret=Secret.from_plain_text("JUST_AN_EXAMPLE"), authority=MOCKED_AUTHORITY, ), auth_handler=CookiesOpenIDTokensHandler( diff --git a/tests/test_application.py b/tests/test_application.py index c29a2937..bc9fa15c 100644 --- a/tests/test_application.py +++ b/tests/test_application.py @@ -1741,8 +1741,8 @@ async def test_handler_from_files_and_form(app): @dataclass(init=False) class OtherInput: textfield: str - checkbox1: bool - checkbox2: bool + checkbox1: str + checkbox2: str def __init__( self, @@ -1752,8 +1752,8 @@ def __init__( **kwargs, ): self.textfield = textfield - self.checkbox1 = checkbox1 == "on" - self.checkbox2 = checkbox2 == "on" + self.checkbox1 = checkbox1 + self.checkbox2 = checkbox2 @app.router.post("/") async def home(files: FromFiles, other: FromForm[OtherInput]): @@ -1765,8 +1765,8 @@ async def home(files: FromFiles, other: FromForm[OtherInput]): assert file1.name == b"files" assert file1.file_name == b"red-dot.png" - assert other.value.checkbox1 is True - assert other.value.checkbox2 is False + assert other.value.checkbox1 == "on" + assert other.value.checkbox2 is None assert other.value.textfield == "Hello World!" await _multipart_mix_scenario(app) @@ -1781,8 +1781,8 @@ async def test_handler_from_form_handling_whole_multipart_with_class(app): @dataclass(init=False) class WholeInput: textfield: str - checkbox1: bool - checkbox2: bool + checkbox1: str + checkbox2: str files: list def __init__( @@ -1794,8 +1794,8 @@ def __init__( **kwargs, ): self.textfield = textfield - self.checkbox1 = checkbox1 == "on" - self.checkbox2 = checkbox2 == "on" + self.checkbox1 = checkbox1 + self.checkbox2 = checkbox2 self.files = files or [] @app.router.post("/") @@ -1808,8 +1808,8 @@ async def home(data: FromForm[WholeInput]): assert file1.name == b"files" assert file1.file_name == b"red-dot.png" - assert data.value.checkbox1 is True - assert data.value.checkbox2 is False + assert data.value.checkbox1 == "on" + assert data.value.checkbox2 is None assert data.value.textfield == "Hello World!" 
diff --git a/tests/test_application.py b/tests/test_application.py
index c29a2937..bc9fa15c 100644
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -1741,8 +1741,8 @@ async def test_handler_from_files_and_form(app):
     @dataclass(init=False)
     class OtherInput:
         textfield: str
-        checkbox1: bool
-        checkbox2: bool
+        checkbox1: str
+        checkbox2: str
 
         def __init__(
             self,
@@ -1752,8 +1752,8 @@ def __init__(
             **kwargs,
         ):
             self.textfield = textfield
-            self.checkbox1 = checkbox1 == "on"
-            self.checkbox2 = checkbox2 == "on"
+            self.checkbox1 = checkbox1
+            self.checkbox2 = checkbox2
 
     @app.router.post("/")
     async def home(files: FromFiles, other: FromForm[OtherInput]):
@@ -1765,8 +1765,8 @@ async def home(files: FromFiles, other: FromForm[OtherInput]):
 
         assert file1.name == b"files"
         assert file1.file_name == b"red-dot.png"
-        assert other.value.checkbox1 is True
-        assert other.value.checkbox2 is False
+        assert other.value.checkbox1 == "on"
+        assert other.value.checkbox2 is None
         assert other.value.textfield == "Hello World!"
 
     await _multipart_mix_scenario(app)
@@ -1781,8 +1781,8 @@ async def test_handler_from_form_handling_whole_multipart_with_class(app):
     @dataclass(init=False)
     class WholeInput:
         textfield: str
-        checkbox1: bool
-        checkbox2: bool
+        checkbox1: str
+        checkbox2: str
         files: list
 
         def __init__(
@@ -1794,8 +1794,8 @@ def __init__(
             **kwargs,
         ):
             self.textfield = textfield
-            self.checkbox1 = checkbox1 == "on"
-            self.checkbox2 = checkbox2 == "on"
+            self.checkbox1 = checkbox1
+            self.checkbox2 = checkbox2
             self.files = files or []
 
     @app.router.post("/")
     async def home(data: FromForm[WholeInput]):
@@ -1808,8 +1808,8 @@ async def home(data: FromForm[WholeInput]):
 
         assert file1.name == b"files"
         assert file1.file_name == b"red-dot.png"
-        assert data.value.checkbox1 is True
-        assert data.value.checkbox2 is False
+        assert data.value.checkbox1 == "on"
+        assert data.value.checkbox2 is None
         assert data.value.textfield == "Hello World!"
 
     await _multipart_mix_scenario(app)
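These test updates reflect that checkbox fields are no longer coerced to `bool`: multipart and URL-encoded form values arrive as plain text ("on" when checked, absent when unchecked). A sketch of an input class that keeps the raw string and converts explicitly in the handler; the names and route are illustrative, not part of this change set:

```python
from dataclasses import dataclass
from typing import Optional

from blacksheep import Application, FromForm, json

app = Application()


@dataclass
class SettingsInput:
    textfield: str
    # Form data is transmitted as text, so a checkbox value arrives as "on"
    # when checked and is missing entirely when unchecked.
    newsletter: Optional[str] = None


@app.router.post("/settings")
async def save_settings(data: FromForm[SettingsInput]):
    subscribed = data.value.newsletter == "on"  # explicit conversion in user code
    return json({"textfield": data.value.textfield, "newsletter": subscribed})
```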
diff --git a/tests/test_bindings.py b/tests/test_bindings.py
index ff5e389d..b00112b1 100644
--- a/tests/test_bindings.py
+++ b/tests/test_bindings.py
@@ -1,5 +1,6 @@
 import sys
 from dataclasses import dataclass
+from datetime import datetime
 from typing import Any, List, Literal, Sequence, Set, Tuple, Type
 from uuid import UUID
 
@@ -95,6 +96,161 @@ class ExamplePydanticModel(BaseModel):
     b: int
 
 
+# Example plain classes
+
+
+class ContactInfo:
+    def __init__(self, phone: str, email: str, created_at: datetime):
+        self.phone = phone
+        self.email = email
+        self.created_at = created_at
+
+
+class PlainAddress:
+    def __init__(self, street: str, city: str, zip_code: str):
+        self.street = street
+        self.city = city
+        self.zip_code = zip_code
+
+
+class PlainUser:
+    def __init__(self, name: str, email: str, age: int, address: PlainAddress):
+        self.name = name
+        self.email = email
+        self.age = age
+        self.address = address
+
+
+class PlainUser2:
+    def __init__(self, name: str, email: str, age: int, addresses: list[PlainAddress]):
+        self.name = name
+        self.email = email
+        self.age = age
+        self.addresses = addresses
+
+
+class PlainUserWithContacts:
+    def __init__(
+        self, name: str, email: str, age: int, contacts: dict[str, ContactInfo]
+    ):
+        self.name = name
+        self.email = email
+        self.age = age
+        self.contacts = contacts
+
+
+class PlainUserWithContactsUUID:
+    def __init__(
+        self, name: str, email: str, age: int, contacts: dict[UUID, ContactInfo]
+    ):
+        self.name = name
+        self.email = email
+        self.age = age
+        self.contacts = contacts
+
+
+class ContactInfoModel(BaseModel):
+    phone: str
+    email: str
+    created_at: datetime
+
+
+class AddressModel(BaseModel):
+    street: str
+    city: str
+    zip_code: str
+
+
+class UserModel(BaseModel):
+    name: str
+    email: str
+    age: int
+    address: AddressModel
+
+
+class UserModel2(BaseModel):
+    name: str
+    email: str
+    age: int
+    addresses: list[AddressModel]
+
+
+class UserModelWithContacts(BaseModel):
+    name: str
+    email: str
+    age: int
+    contacts: dict[str, ContactInfoModel]
+
+
+class UserModelWithContactsUUID(BaseModel):
+    name: str
+    email: str
+    age: int
+    contacts: dict[UUID, ContactInfoModel]
+
+
+@dataclass
+class AddressDc:
+    street: str
+    city: str
+    zip_code: str
+
+
+@dataclass
+class ContactInfoDc:
+    phone: str
+    email: str
+    created_at: datetime
+
+
+@dataclass
+class UserDc:
+    name: str
+    email: str
+    age: int
+    address: AddressDc
+
+
+@dataclass
+class UserDcMix:
+    name: str
+    email: str
+    age: int
+    address: AddressModel
+
+
+@dataclass
+class UserDcWithContacts:
+    name: str
+    email: str
+    age: int
+    contacts: dict[str, ContactInfoDc]
+
+
+@dataclass
+class UserDcWithContactsUUID:
+    name: str
+    email: str
+    age: int
+    contacts: dict[UUID, ContactInfoDc]
+
+
+@dataclass
+class UserDc2:
+    name: str
+    email: str
+    age: int
+    addresses: list[AddressDc]
+
+
+@dataclass
+class UserDc2Mix:
+    name: str
+    email: str
+    age: int
+    addresses: list[AddressModel]  # mixing a Pydantic model into a dataclass: unusual, but supported
+
+
 async def test_from_body_json_binding():
     request = Request("POST", b"/", [JSONContentType]).with_content(
         JSONContent({"a": "world", "b": 9000})
     )
@@ -804,6 +960,74 @@ async def test_from_body_json_binding_ignore_extra_parameters(
     assert value.b == expected_b
 
 
+@pytest.mark.parametrize("expected_type", [PlainUser, UserModel, UserDc, UserDcMix])
+async def test_from_body_json_binding_ignore_extra_parameters_nested_1(expected_type):
+    # Test that extra properties are ignored also in child properties
+    plain_data = {
+        "name": "Jane",
+        "email": "jane@example.com",
+        "age": 25,
+        "extra_field": "ignored",
+        "address": {
+            "street": "456 Oak Ave",
+            "city": "Seattle",
+            "zip_code": "98101",
+            "extra_address_field": "ignored",
+        },
+    }
+
+    request = Request("POST", b"/", [JSONContentType]).with_content(
+        JSONContent(plain_data)
+    )
+
+    parameter = JSONBinder(expected_type)
+
+    value = await parameter.get_value(request)
+
+    assert isinstance(value, expected_type)
+    assert value.name == "Jane"
+    assert value.address.street == "456 Oak Ave"
+
+
+@pytest.mark.parametrize("expected_type", [PlainUser2, UserModel2, UserDc2, UserDc2Mix])
+async def test_from_body_json_binding_ignore_extra_parameters_nested_2(expected_type):
+    # Test that extra properties are ignored also in child properties
+    plain_data = {
+        "name": "Jane",
+        "email": "jane@example.com",
+        "age": 25,
+        "extra_field": "ignored",
+        "addresses": [
+            {
+                "street": "456 Oak Ave",
+                "city": "Seattle",
+                "zip_code": "98101",
+                "extra_address_field": "ignored",
+            },
+            {
+                "street": "Foo",
+                "city": "Foo City",
+                "zip_code": "00888",
+                "extra_address_field": "ignored",
+                "some_other_field": 3,
+            },
+        ],
+    }
+
+    request = Request("POST", b"/", [JSONContentType]).with_content(
+        JSONContent(plain_data)
+    )
+
+    parameter = JSONBinder(expected_type)
+
+    value = await parameter.get_value(request)
+
+    assert isinstance(value, expected_type)
+    assert value.name == "Jane"
+    assert value.addresses[0].street == "456 Oak Ave"
+    assert value.addresses[1].street == "Foo"
+
+
 @pytest.mark.parametrize(
     "collection_type,model_class,expected_type",
     [
@@ -863,3 +1087,166 @@ async def test_from_body_json_binding_collections(
         expected_values = [("first", 100), ("second", 200), ("third", 300)]
         assert item.a == expected_values[i][0]
         assert item.b == expected_values[i][1]
+
+
+@pytest.mark.parametrize(
+    "expected_type",
+    [
+        PlainUserWithContactsUUID,
+        UserModelWithContactsUUID,
+        UserDcWithContactsUUID,
+    ],
+)
+async def test_from_body_json_binding_dict_uuid_custom_class(expected_type):
+    """Test conversion of dict[UUID, CustomClass] from JSON body"""
+    uuid_home = "b0c1f822-b63c-475e-9f2e-b6406bafcc2b"
+    uuid_work = "d5fd0cde-4ad6-4b61-a5b1-5b8e6d48cebe"
+    uuid_mobile = "00000000-0000-0000-0000-000000000000"
+
+    data = {
+        "name": "Jane",
+        "email": "jane@example.com",
+        "age": 25,
+        "contacts": {
+            uuid_home: {
+                "phone": "555-1234",
+                "email": "jane.home@example.com",
+                "created_at": "2023-01-15T10:30:00",
+                "extra_field": "should be ignored",
+            },
+            uuid_work: {
+                "phone": "555-5678",
+                "email": "jane.work@example.com",
+                "created_at": "2023-02-20T14:45:00",
+                "extra_field": "should be ignored",
+            },
+            uuid_mobile: {
+                "phone": "555-9999",
+                "email": "jane.mobile@example.com",
+                "created_at": "2023-03-25T08:15:00",
+                "extra_field": "should be ignored",
+            },
+        },
+    }
+
+    request = Request("POST", b"/", [JSONContentType]).with_content(JSONContent(data))
+
+    parameter = JSONBinder(expected_type)
+
+    value = await parameter.get_value(request)
+
+    assert isinstance(value, expected_type)
+    assert value.name == "Jane"
+    assert value.email == "jane@example.com"
+    assert value.age == 25
+    assert isinstance(value.contacts, dict)
+    assert len(value.contacts) == 3
+
+    # Check that keys are UUIDs
+    for key in value.contacts.keys():
+        assert isinstance(key, UUID)
+
+    # Check specific values
+    assert UUID(uuid_home) in value.contacts
+    assert UUID(uuid_work) in value.contacts
+    assert UUID(uuid_mobile) in value.contacts
+    assert value.contacts[UUID(uuid_home)].phone == "555-1234"
+    assert value.contacts[UUID(uuid_home)].email == "jane.home@example.com"
+    assert value.contacts[UUID(uuid_home)].created_at == datetime(
+        2023, 1, 15, 10, 30, 0
+    )
+    assert value.contacts[UUID(uuid_work)].phone == "555-5678"
+    assert value.contacts[UUID(uuid_work)].email == "jane.work@example.com"
+    assert value.contacts[UUID(uuid_work)].created_at == datetime(
+        2023, 2, 20, 14, 45, 0
+    )
+    assert value.contacts[UUID(uuid_mobile)].created_at == datetime(
+        2023, 3, 25, 8, 15, 0
+    )
+
+
+@pytest.mark.parametrize(
+    "expected_type",
+    [
+        PlainUserWithContacts,
+        UserModelWithContacts,
+        UserDcWithContacts,
+    ],
+)
+async def test_from_body_json_binding_dict_str_custom_class(expected_type):
+    """Test conversion of dict[str, CustomClass] from JSON body"""
+    data = {
+        "name": "Jane",
+        "email": "jane@example.com",
+        "age": 25,
+        "contacts": {
+            "home": {
+                "phone": "555-1234",
+                "email": "jane.home@example.com",
+                "created_at": "2023-01-15T10:30:00",
+            },
+            "work": {
+                "phone": "555-5678",
+                "email": "jane.work@example.com",
+                "created_at": "2023-02-20T14:45:00",
+            },
+            "mobile": {
+                "phone": "555-9999",
+                "email": "jane.mobile@example.com",
+                "created_at": "2023-03-25T08:15:00",
+            },
+        },
+    }
+
+    request = Request("POST", b"/", [JSONContentType]).with_content(JSONContent(data))
+
+    parameter = JSONBinder(expected_type)
+
+    value = await parameter.get_value(request)
+
+    assert isinstance(value, expected_type)
+    assert value.name == "Jane"
+    assert value.email == "jane@example.com"
+    assert value.age == 25
+    assert isinstance(value.contacts, dict)
+    assert len(value.contacts) == 3
+    assert "home" in value.contacts
+    assert "work" in value.contacts
+    assert "mobile" in value.contacts
+    assert value.contacts["home"].phone == "555-1234"
+    assert value.contacts["home"].email == "jane.home@example.com"
+    assert value.contacts["home"].created_at == datetime(2023, 1, 15, 10, 30, 0)
+    assert value.contacts["work"].phone == "555-5678"
+    assert value.contacts["work"].email == "jane.work@example.com"
+    assert value.contacts["work"].created_at == datetime(2023, 2, 20, 14, 45, 0)
+
+
+@pytest.mark.parametrize(
+    "expected_type",
+    [
+        PlainUserWithContactsUUID,
+        UserModelWithContactsUUID,
+        UserDcWithContactsUUID,
+    ],
+)
+async def test_from_body_json_binding_empty_dict(expected_type):
+    """Test conversion with empty dict"""
+    data = {
+        "name": "Jane",
+        "email": "jane@example.com",
+        "age": 25,
+        "contacts": {},
+    }
+
+    request = Request("POST", b"/", [JSONContentType]).with_content(JSONContent(data))
+
+    parameter = JSONBinder(expected_type)
+
+    value = await parameter.get_value(request)
+
+    assert isinstance(value, expected_type)
+    assert value.name == "Jane"
+    assert value.email == "jane@example.com"
+    assert value.age == 25
+    assert isinstance(value.contacts, dict)
+    assert len(value.contacts) == 0
diff --git a/tests/test_openapi_v3.py b/tests/test_openapi_v3.py
index 3f13acff..e8e9a94f 100644
--- a/tests/test_openapi_v3.py
+++ b/tests/test_openapi_v3.py
@@ -2,7 +2,7 @@
 from dataclasses import dataclass
 from datetime import date, datetime
 from enum import IntEnum
-from typing import Generic, List, Mapping, Sequence, TypeVar, Union
+from typing import Generic, Mapping, Sequence, TypeVar, Union
 from uuid import UUID
 
 if sys.version_info >= (3, 9):
@@ -268,28 +268,24 @@ def get_cats_annotated_api() -> Application:
     delete = app.router.delete
 
     @get("/api/cats")
-    def get_cats_annotated() -> Annotated[Response, PaginatedSet[Cat]]:
-        ...
+    def get_cats_annotated() -> Annotated[Response, PaginatedSet[Cat]]: ...
 
     @get("/api/cats_2")
-    def get_cats_annotated_2() -> Annotated[Response, PaginatedSet[Cat], list[Cat]]:
-        ...
+    def get_cats_annotated_2() -> Annotated[Response, PaginatedSet[Cat], list[Cat]]: ...
 
     @get("/api/cats_3")
-    def get_cats_annotated_3() -> Annotated[Response, PaginatedSet[Cat] | list[Cat]]:
-        ...
+    def get_cats_annotated_3() -> (
+        Annotated[Response, PaginatedSet[Cat] | list[Cat]]
+    ): ...
 
     @get("/api/cats/{cat_id}")
-    def get_cat_details_annotated(cat_id: int) -> Annotated[Response, CatDetails]:
-        ...
+    def get_cat_details_annotated(cat_id: int) -> Annotated[Response, CatDetails]: ...
 
     @post("/api/cats")
-    def create_cat_annotated(input: CreateCatInput) -> Annotated[Response, Cat]:
-        ...
+    def create_cat_annotated(input: CreateCatInput) -> Annotated[Response, Cat]: ...
 
     @delete("/api/cats/{cat_id}")
-    def delete_cat_annotated(cat_id: int) -> Annotated[Response, None]:
-        ...
+    def delete_cat_annotated(cat_id: int) -> Annotated[Response, None]: ...
 
     return app
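Finally, a sketch of the end-to-end behaviour exercised by the new `dict[UUID, ...]` binding tests above: JSON object keys arrive as strings and are converted to `UUID`, while each value is mapped to the nested class, ignoring extra fields. The `AddressBook` route is illustrative and not part of the test suite.

```python
from dataclasses import dataclass
from datetime import datetime
from uuid import UUID

from blacksheep import Application, FromJSON, json

app = Application()


@dataclass
class ContactInfo:
    phone: str
    email: str
    created_at: datetime


@dataclass
class AddressBook:
    owner: str
    contacts: dict[UUID, ContactInfo]


@app.router.post("/address-book")
async def save_address_book(data: FromJSON[AddressBook]):
    # The dictionary keys are UUID instances and each value is a ContactInfo
    # after binding, mirroring the assertions in the tests above.
    return json({"owner": data.value.owner, "count": len(data.value.contacts)})
```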