diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 9dcd5cc8..3c4dbee7 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.0.0-beta.5" + ".": "3.0.0-beta.6" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 85fdc0d1..f1346734 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## 3.0.0-beta.6 (2025-09-08) + +Full Changelog: [v3.0.0-beta.5...v3.0.0-beta.6](https://github.com/digitalocean/gradient-python/compare/v3.0.0-beta.5...v3.0.0-beta.6) + +### Features + +* improve future compat with pydantic v3 ([9632b89](https://github.com/digitalocean/gradient-python/commit/9632b892575f89d6f76a764cbf5c51f902dd05b4)) +* **types:** replace List[str] with SequenceNotStr in params ([456bd36](https://github.com/digitalocean/gradient-python/commit/456bd36ad4468639e77aadd18d90f34c6d3839fa)) + + +### Chores + +* **internal:** move mypy configurations to `pyproject.toml` file ([c274b76](https://github.com/digitalocean/gradient-python/commit/c274b766f30830d3992934d83b54bc8b1a7a66a0)) +* **tests:** simplify `get_platform` test ([7ee2e88](https://github.com/digitalocean/gradient-python/commit/7ee2e882d95c6de785fcd951054a5b88a8877116)) + ## 3.0.0-beta.5 (2025-09-08) Full Changelog: [v3.0.0-beta.4...v3.0.0-beta.5](https://github.com/digitalocean/gradient-python/compare/v3.0.0-beta.4...v3.0.0-beta.5) diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 9a8e555e..00000000 --- a/mypy.ini +++ /dev/null @@ -1,50 +0,0 @@ -[mypy] -pretty = True -show_error_codes = True - -# Exclude _files.py because mypy isn't smart enough to apply -# the correct type narrowing and as this is an internal module -# it's fine to just use Pyright. -# -# We also exclude our `tests` as mypy doesn't always infer -# types correctly and Pyright will still catch any type errors. -exclude = ^(src/gradient/_files\.py|_dev/.*\.py|tests/.*)$ - -strict_equality = True -implicit_reexport = True -check_untyped_defs = True -no_implicit_optional = True - -warn_return_any = True -warn_unreachable = True -warn_unused_configs = True - -# Turn these options off as it could cause conflicts -# with the Pyright options. -warn_unused_ignores = False -warn_redundant_casts = False - -disallow_any_generics = True -disallow_untyped_defs = True -disallow_untyped_calls = True -disallow_subclassing_any = True -disallow_incomplete_defs = True -disallow_untyped_decorators = True -cache_fine_grained = True - -# By default, mypy reports an error if you assign a value to the result -# of a function call that doesn't return anything. We do this in our test -# cases: -# ``` -# result = ... -# assert result is None -# ``` -# Changing this codegen to make mypy happy would increase complexity -# and would not be worth it. 
-disable_error_code = func-returns-value,overload-cannot-match - -# https://github.com/python/mypy/issues/12162 -[mypy.overrides] -module = "black.files.*" -ignore_errors = true -ignore_missing_imports = true diff --git a/pyproject.toml b/pyproject.toml index bde954ca..d6232c97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "gradient" -version = "3.0.0-beta.5" +version = "3.0.0-beta.6" description = "The official Python library for the Gradient API" dynamic = ["readme"] license = "Apache-2.0" @@ -54,7 +54,6 @@ dev-dependencies = [ "dirty-equals>=0.6.0", "importlib-metadata>=6.7.0", "rich>=13.7.1", - "nest_asyncio==1.6.0", "pytest-xdist>=3.6.1", ] @@ -144,6 +143,58 @@ reportOverlappingOverload = false reportImportCycles = false reportPrivateUsage = false +[tool.mypy] +pretty = true +show_error_codes = true + +# Exclude _files.py because mypy isn't smart enough to apply +# the correct type narrowing and as this is an internal module +# it's fine to just use Pyright. +# +# We also exclude our `tests` as mypy doesn't always infer +# types correctly and Pyright will still catch any type errors. +exclude = ['src/gradient/_files.py', '_dev/.*.py', 'tests/.*'] + +strict_equality = true +implicit_reexport = true +check_untyped_defs = true +no_implicit_optional = true + +warn_return_any = true +warn_unreachable = true +warn_unused_configs = true + +# Turn these options off as it could cause conflicts +# with the Pyright options. +warn_unused_ignores = false +warn_redundant_casts = false + +disallow_any_generics = true +disallow_untyped_defs = true +disallow_untyped_calls = true +disallow_subclassing_any = true +disallow_incomplete_defs = true +disallow_untyped_decorators = true +cache_fine_grained = true + +# By default, mypy reports an error if you assign a value to the result +# of a function call that doesn't return anything. We do this in our test +# cases: +# ``` +# result = ... +# assert result is None +# ``` +# Changing this codegen to make mypy happy would increase complexity +# and would not be worth it. 
+disable_error_code = "func-returns-value,overload-cannot-match" + +# https://github.com/python/mypy/issues/12162 +[[tool.mypy.overrides]] +module = "black.files.*" +ignore_errors = true +ignore_missing_imports = true + + [tool.ruff] line-length = 120 output-format = "grouped" diff --git a/requirements-dev.lock b/requirements-dev.lock index 7a0f60ab..af44e06b 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -75,7 +75,6 @@ multidict==6.4.4 mypy==1.14.1 mypy-extensions==1.0.0 # via mypy -nest-asyncio==1.6.0 nodeenv==1.8.0 # via pyright nox==2023.4.22 diff --git a/src/gradient/_base_client.py b/src/gradient/_base_client.py index 74f3c57a..58b14617 100644 --- a/src/gradient/_base_client.py +++ b/src/gradient/_base_client.py @@ -59,7 +59,7 @@ ModelBuilderProtocol, ) from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping -from ._compat import PYDANTIC_V2, model_copy, model_dump +from ._compat import PYDANTIC_V1, model_copy, model_dump from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type from ._response import ( APIResponse, @@ -232,7 +232,7 @@ def _set_private_attributes( model: Type[_T], options: FinalRequestOptions, ) -> None: - if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None: + if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None: self.__pydantic_private__ = {} self._model = model @@ -320,7 +320,7 @@ def _set_private_attributes( client: AsyncAPIClient, options: FinalRequestOptions, ) -> None: - if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None: + if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None: self.__pydantic_private__ = {} self._model = model diff --git a/src/gradient/_compat.py b/src/gradient/_compat.py index 92d9ee61..bdef67f0 100644 --- a/src/gradient/_compat.py +++ b/src/gradient/_compat.py @@ -12,14 +12,13 @@ _T = TypeVar("_T") _ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) -# --------------- Pydantic v2 compatibility --------------- +# --------------- Pydantic v2, v3 compatibility --------------- # Pyright incorrectly reports some of our functions as overriding a method when they don't # pyright: reportIncompatibleMethodOverride=false -PYDANTIC_V2 = pydantic.VERSION.startswith("2.") +PYDANTIC_V1 = pydantic.VERSION.startswith("1.") -# v1 re-exports if TYPE_CHECKING: def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001 @@ -44,90 +43,92 @@ def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001 ... 
else: - if PYDANTIC_V2: - from pydantic.v1.typing import ( + # v1 re-exports + if PYDANTIC_V1: + from pydantic.typing import ( get_args as get_args, is_union as is_union, get_origin as get_origin, is_typeddict as is_typeddict, is_literal_type as is_literal_type, ) - from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime + from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime else: - from pydantic.typing import ( + from ._utils import ( get_args as get_args, is_union as is_union, get_origin as get_origin, + parse_date as parse_date, is_typeddict as is_typeddict, + parse_datetime as parse_datetime, is_literal_type as is_literal_type, ) - from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime # refactored config if TYPE_CHECKING: from pydantic import ConfigDict as ConfigDict else: - if PYDANTIC_V2: - from pydantic import ConfigDict - else: + if PYDANTIC_V1: # TODO: provide an error message here? ConfigDict = None + else: + from pydantic import ConfigDict as ConfigDict # renamed methods / properties def parse_obj(model: type[_ModelT], value: object) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(value) - else: + if PYDANTIC_V1: return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + else: + return model.model_validate(value) def field_is_required(field: FieldInfo) -> bool: - if PYDANTIC_V2: - return field.is_required() - return field.required # type: ignore + if PYDANTIC_V1: + return field.required # type: ignore + return field.is_required() def field_get_default(field: FieldInfo) -> Any: value = field.get_default() - if PYDANTIC_V2: - from pydantic_core import PydanticUndefined - - if value == PydanticUndefined: - return None + if PYDANTIC_V1: return value + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None return value def field_outer_type(field: FieldInfo) -> Any: - if PYDANTIC_V2: - return field.annotation - return field.outer_type_ # type: ignore + if PYDANTIC_V1: + return field.outer_type_ # type: ignore + return field.annotation def get_model_config(model: type[pydantic.BaseModel]) -> Any: - if PYDANTIC_V2: - return model.model_config - return model.__config__ # type: ignore + if PYDANTIC_V1: + return model.__config__ # type: ignore + return model.model_config def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: - if PYDANTIC_V2: - return model.model_fields - return model.__fields__ # type: ignore + if PYDANTIC_V1: + return model.__fields__ # type: ignore + return model.model_fields def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT: - if PYDANTIC_V2: - return model.model_copy(deep=deep) - return model.copy(deep=deep) # type: ignore + if PYDANTIC_V1: + return model.copy(deep=deep) # type: ignore + return model.model_copy(deep=deep) def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: - if PYDANTIC_V2: - return model.model_dump_json(indent=indent) - return model.json(indent=indent) # type: ignore + if PYDANTIC_V1: + return model.json(indent=indent) # type: ignore + return model.model_dump_json(indent=indent) def model_dump( @@ -139,14 +140,14 @@ def model_dump( warnings: bool = True, mode: Literal["json", "python"] = "python", ) -> dict[str, Any]: - if PYDANTIC_V2 or hasattr(model, "model_dump"): + if (not PYDANTIC_V1) or hasattr(model, "model_dump"): return model.model_dump( mode=mode, 
exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, # warnings are not supported in Pydantic v1 - warnings=warnings if PYDANTIC_V2 else True, + warnings=True if PYDANTIC_V1 else warnings, ) return cast( "dict[str, Any]", @@ -159,9 +160,9 @@ def model_dump( def model_parse(model: type[_ModelT], data: Any) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(data) - return model.parse_obj(data) # pyright: ignore[reportDeprecated] + if PYDANTIC_V1: + return model.parse_obj(data) # pyright: ignore[reportDeprecated] + return model.model_validate(data) # generic models @@ -170,17 +171,16 @@ def model_parse(model: type[_ModelT], data: Any) -> _ModelT: class GenericModel(pydantic.BaseModel): ... else: - if PYDANTIC_V2: + if PYDANTIC_V1: + import pydantic.generics + + class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... + else: # there no longer needs to be a distinction in v2 but # we still have to create our own subclass to avoid # inconsistent MRO ordering errors class GenericModel(pydantic.BaseModel): ... - else: - import pydantic.generics - - class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... - # cached properties if TYPE_CHECKING: diff --git a/src/gradient/_models.py b/src/gradient/_models.py index 92f7c10b..3a6017ef 100644 --- a/src/gradient/_models.py +++ b/src/gradient/_models.py @@ -50,7 +50,7 @@ strip_annotated_type, ) from ._compat import ( - PYDANTIC_V2, + PYDANTIC_V1, ConfigDict, GenericModel as BaseGenericModel, get_args, @@ -81,11 +81,7 @@ class _ConfigProtocol(Protocol): class BaseModel(pydantic.BaseModel): - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict( - extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) - ) - else: + if PYDANTIC_V1: @property @override @@ -95,6 +91,10 @@ def model_fields_set(self) -> set[str]: class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] extra: Any = pydantic.Extra.allow # type: ignore + else: + model_config: ClassVar[ConfigDict] = ConfigDict( + extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) + ) def to_dict( self, @@ -215,25 +215,25 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride] if key not in model_fields: parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value - if PYDANTIC_V2: - _extra[key] = parsed - else: + if PYDANTIC_V1: _fields_set.add(key) fields_values[key] = parsed + else: + _extra[key] = parsed object.__setattr__(m, "__dict__", fields_values) - if PYDANTIC_V2: - # these properties are copied from Pydantic's `model_construct()` method - object.__setattr__(m, "__pydantic_private__", None) - object.__setattr__(m, "__pydantic_extra__", _extra) - object.__setattr__(m, "__pydantic_fields_set__", _fields_set) - else: + if PYDANTIC_V1: # init_private_attributes() does not exist in v2 m._init_private_attributes() # type: ignore # copied from Pydantic v1's `construct()` method object.__setattr__(m, "__fields_set__", _fields_set) + else: + # these properties are copied from Pydantic's `model_construct()` method + object.__setattr__(m, "__pydantic_private__", None) + object.__setattr__(m, "__pydantic_extra__", _extra) + object.__setattr__(m, "__pydantic_fields_set__", _fields_set) return m @@ -243,7 +243,7 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride] # although not in practice model_construct = construct - if not PYDANTIC_V2: + if PYDANTIC_V1: # we define 
aliases for some of the new pydantic v2 methods so # that we can just document these methods without having to specify # a specific pydantic version as some users may not know which @@ -363,10 +363,10 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object: if value is None: return field_get_default(field) - if PYDANTIC_V2: - type_ = field.annotation - else: + if PYDANTIC_V1: type_ = cast(type, field.outer_type_) # type: ignore + else: + type_ = field.annotation # type: ignore if type_ is None: raise RuntimeError(f"Unexpected field type is None for {key}") @@ -375,7 +375,7 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object: def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None: - if not PYDANTIC_V2: + if PYDANTIC_V1: # TODO return None @@ -628,30 +628,30 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, for variant in get_args(union): variant = strip_annotated_type(variant) if is_basemodel_type(variant): - if PYDANTIC_V2: - field = _extract_field_schema_pv2(variant, discriminator_field_name) - if not field: + if PYDANTIC_V1: + field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + if not field_info: continue # Note: if one variant defines an alias then they all should - discriminator_alias = field.get("serialization_alias") - - field_schema = field["schema"] + discriminator_alias = field_info.alias - if field_schema["type"] == "literal": - for entry in cast("LiteralSchema", field_schema)["expected"]: + if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation): + for entry in get_args(annotation): if isinstance(entry, str): mapping[entry] = variant else: - field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - if not field_info: + field = _extract_field_schema_pv2(variant, discriminator_field_name) + if not field: continue # Note: if one variant defines an alias then they all should - discriminator_alias = field_info.alias + discriminator_alias = field.get("serialization_alias") - if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation): - for entry in get_args(annotation): + field_schema = field["schema"] + + if field_schema["type"] == "literal": + for entry in cast("LiteralSchema", field_schema)["expected"]: if isinstance(entry, str): mapping[entry] = variant @@ -714,7 +714,7 @@ class GenericModel(BaseGenericModel, BaseModel): pass -if PYDANTIC_V2: +if not PYDANTIC_V1: from pydantic import TypeAdapter as _TypeAdapter _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter)) @@ -782,12 +782,12 @@ class FinalRequestOptions(pydantic.BaseModel): json_data: Union[Body, None] = None extra_json: Union[AnyMapping, None] = None - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) - else: + if PYDANTIC_V1: class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] arbitrary_types_allowed: bool = True + else: + model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) def get_max_retries(self, max_retries: int) -> int: if isinstance(self.max_retries, NotGiven): @@ -820,9 +820,9 @@ def construct( # type: ignore key: strip_not_given(value) for key, value in values.items() } - if PYDANTIC_V2: - return super().model_construct(_fields_set, 
**kwargs) - return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] + if PYDANTIC_V1: + return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] + return super().model_construct(_fields_set, **kwargs) if not TYPE_CHECKING: # type checkers incorrectly complain about this assignment diff --git a/src/gradient/_utils/__init__.py b/src/gradient/_utils/__init__.py index ca547ce5..dc64e29a 100644 --- a/src/gradient/_utils/__init__.py +++ b/src/gradient/_utils/__init__.py @@ -10,7 +10,6 @@ lru_cache as lru_cache, is_mapping as is_mapping, is_tuple_t as is_tuple_t, - parse_date as parse_date, is_iterable as is_iterable, is_sequence as is_sequence, coerce_float as coerce_float, @@ -23,7 +22,6 @@ coerce_boolean as coerce_boolean, coerce_integer as coerce_integer, file_from_path as file_from_path, - parse_datetime as parse_datetime, strip_not_given as strip_not_given, deepcopy_minimal as deepcopy_minimal, get_async_library as get_async_library, @@ -32,6 +30,13 @@ maybe_coerce_boolean as maybe_coerce_boolean, maybe_coerce_integer as maybe_coerce_integer, ) +from ._compat import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, +) from ._typing import ( is_list_type as is_list_type, is_union_type as is_union_type, @@ -56,3 +61,4 @@ function_has_argument as function_has_argument, assert_signatures_in_sync as assert_signatures_in_sync, ) +from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime diff --git a/src/gradient/_utils/_compat.py b/src/gradient/_utils/_compat.py new file mode 100644 index 00000000..dd703233 --- /dev/null +++ b/src/gradient/_utils/_compat.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import sys +import typing_extensions +from typing import Any, Type, Union, Literal, Optional +from datetime import date, datetime +from typing_extensions import get_args as _get_args, get_origin as _get_origin + +from .._types import StrBytesIntFloat +from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime + +_LITERAL_TYPES = {Literal, typing_extensions.Literal} + + +def get_args(tp: type[Any]) -> tuple[Any, ...]: + return _get_args(tp) + + +def get_origin(tp: type[Any]) -> type[Any] | None: + return _get_origin(tp) + + +def is_union(tp: Optional[Type[Any]]) -> bool: + if sys.version_info < (3, 10): + return tp is Union # type: ignore[comparison-overlap] + else: + import types + + return tp is Union or tp is types.UnionType + + +def is_typeddict(tp: Type[Any]) -> bool: + return typing_extensions.is_typeddict(tp) + + +def is_literal_type(tp: Type[Any]) -> bool: + return get_origin(tp) in _LITERAL_TYPES + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + return _parse_date(value) + + +def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: + return _parse_datetime(value) diff --git a/src/gradient/_utils/_datetime_parse.py b/src/gradient/_utils/_datetime_parse.py new file mode 100644 index 00000000..7cb9d9e6 --- /dev/null +++ b/src/gradient/_utils/_datetime_parse.py @@ -0,0 +1,136 @@ +""" +This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py +without the Pydantic v1 specific errors. 
+""" + +from __future__ import annotations + +import re +from typing import Dict, Union, Optional +from datetime import date, datetime, timezone, timedelta + +from .._types import StrBytesIntFloat + +date_expr = r"(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})" +time_expr = ( + r"(?P\d{1,2}):(?P\d{1,2})" + r"(?::(?P\d{1,2})(?:\.(?P\d{1,6})\d{0,6})?)?" + r"(?PZ|[+-]\d{2}(?::?\d{2})?)?$" +) + +date_re = re.compile(f"{date_expr}$") +datetime_re = re.compile(f"{date_expr}[T ]{time_expr}") + + +EPOCH = datetime(1970, 1, 1) +# if greater than this, the number is in ms, if less than or equal it's in seconds +# (in seconds this is 11th October 2603, in ms it's 20th August 1970) +MS_WATERSHED = int(2e10) +# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9 +MAX_NUMBER = int(3e20) + + +def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]: + if isinstance(value, (int, float)): + return value + try: + return float(value) + except ValueError: + return None + except TypeError: + raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None + + +def _from_unix_seconds(seconds: Union[int, float]) -> datetime: + if seconds > MAX_NUMBER: + return datetime.max + elif seconds < -MAX_NUMBER: + return datetime.min + + while abs(seconds) > MS_WATERSHED: + seconds /= 1000 + dt = EPOCH + timedelta(seconds=seconds) + return dt.replace(tzinfo=timezone.utc) + + +def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]: + if value == "Z": + return timezone.utc + elif value is not None: + offset_mins = int(value[-2:]) if len(value) > 3 else 0 + offset = 60 * int(value[1:3]) + offset_mins + if value[0] == "-": + offset = -offset + return timezone(timedelta(minutes=offset)) + else: + return None + + +def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: + """ + Parse a datetime/int/float/string and return a datetime.datetime. + + This function supports time zone offsets. When the input contains one, + the output uses a timezone with a fixed offset from UTC. + + Raise ValueError if the input is well formatted but not a valid datetime. + Raise ValueError if the input isn't well formatted. + """ + if isinstance(value, datetime): + return value + + number = _get_numeric(value, "datetime") + if number is not None: + return _from_unix_seconds(number) + + if isinstance(value, bytes): + value = value.decode() + + assert not isinstance(value, (float, int)) + + match = datetime_re.match(value) + if match is None: + raise ValueError("invalid datetime format") + + kw = match.groupdict() + if kw["microsecond"]: + kw["microsecond"] = kw["microsecond"].ljust(6, "0") + + tzinfo = _parse_timezone(kw.pop("tzinfo")) + kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} + kw_["tzinfo"] = tzinfo + + return datetime(**kw_) # type: ignore + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + """ + Parse a date/int/float/string and return a datetime.date. + + Raise ValueError if the input is well formatted but not a valid date. + Raise ValueError if the input isn't well formatted. 
+ """ + if isinstance(value, date): + if isinstance(value, datetime): + return value.date() + else: + return value + + number = _get_numeric(value, "date") + if number is not None: + return _from_unix_seconds(number).date() + + if isinstance(value, bytes): + value = value.decode() + + assert not isinstance(value, (float, int)) + match = date_re.match(value) + if match is None: + raise ValueError("invalid date format") + + kw = {k: int(v) for k, v in match.groupdict().items()} + + try: + return date(**kw) + except ValueError: + raise ValueError("invalid date format") from None diff --git a/src/gradient/_utils/_transform.py b/src/gradient/_utils/_transform.py index b0cc20a7..c19124f0 100644 --- a/src/gradient/_utils/_transform.py +++ b/src/gradient/_utils/_transform.py @@ -16,18 +16,20 @@ lru_cache, is_mapping, is_iterable, + is_sequence, ) from .._files import is_base64_file_input +from ._compat import get_origin, is_typeddict from ._typing import ( is_list_type, is_union_type, extract_type_arg, is_iterable_type, is_required_type, + is_sequence_type, is_annotated_type, strip_annotated_type, ) -from .._compat import get_origin, model_dump, is_typeddict _T = TypeVar("_T") @@ -167,6 +169,8 @@ def _transform_recursive( Defaults to the same value as the `annotation` argument. """ + from .._compat import model_dump + if inner_type is None: inner_type = annotation @@ -184,6 +188,8 @@ def _transform_recursive( (is_list_type(stripped_type) and is_list(data)) # Iterable[T] or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + # Sequence[T] + or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str)) ): # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually # intended as an iterable, so we don't transform it. @@ -329,6 +335,8 @@ async def _async_transform_recursive( Defaults to the same value as the `annotation` argument. """ + from .._compat import model_dump + if inner_type is None: inner_type = annotation @@ -346,6 +354,8 @@ async def _async_transform_recursive( (is_list_type(stripped_type) and is_list(data)) # Iterable[T] or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + # Sequence[T] + or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str)) ): # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually # intended as an iterable, so we don't transform it. 
diff --git a/src/gradient/_utils/_typing.py b/src/gradient/_utils/_typing.py index 845cd6b2..193109f3 100644 --- a/src/gradient/_utils/_typing.py +++ b/src/gradient/_utils/_typing.py @@ -15,7 +15,7 @@ from ._utils import lru_cache from .._types import InheritsGeneric -from .._compat import is_union as _is_union +from ._compat import is_union as _is_union def is_annotated_type(typ: type) -> bool: diff --git a/src/gradient/_utils/_utils.py b/src/gradient/_utils/_utils.py index ea3cf3f2..f0818595 100644 --- a/src/gradient/_utils/_utils.py +++ b/src/gradient/_utils/_utils.py @@ -22,7 +22,6 @@ import sniffio from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike -from .._compat import parse_date as parse_date, parse_datetime as parse_datetime _T = TypeVar("_T") _TupleT = TypeVar("_TupleT", bound=Tuple[object, ...]) diff --git a/src/gradient/_version.py b/src/gradient/_version.py index c7adeab4..81080cc3 100644 --- a/src/gradient/_version.py +++ b/src/gradient/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "gradient" -__version__ = "3.0.0-beta.5" # x-release-please-version +__version__ = "3.0.0-beta.6" # x-release-please-version diff --git a/src/gradient/resources/agents/agents.py b/src/gradient/resources/agents/agents.py index 67f7f4ae..8d06584c 100644 --- a/src/gradient/resources/agents/agents.py +++ b/src/gradient/resources/agents/agents.py @@ -2,8 +2,6 @@ from __future__ import annotations -from typing import List - import httpx from .routes import ( @@ -22,7 +20,7 @@ agent_update_params, agent_update_status_params, ) -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from .api_keys import ( APIKeysResource, @@ -183,13 +181,13 @@ def create( anthropic_key_uuid: str | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, instruction: str | NotGiven = NOT_GIVEN, - knowledge_base_uuid: List[str] | NotGiven = NOT_GIVEN, + knowledge_base_uuid: SequenceNotStr[str] | NotGiven = NOT_GIVEN, model_uuid: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, openai_key_uuid: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, @@ -313,7 +311,7 @@ def update( project_id: str | NotGiven = NOT_GIVEN, provide_citations: bool | NotGiven = NOT_GIVEN, retrieval_method: APIRetrievalMethod | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, temperature: float | NotGiven = NOT_GIVEN, top_p: float | NotGiven = NOT_GIVEN, body_uuid: str | NotGiven = NOT_GIVEN, @@ -626,13 +624,13 @@ async def create( anthropic_key_uuid: str | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, instruction: str | NotGiven = NOT_GIVEN, - knowledge_base_uuid: List[str] | NotGiven = NOT_GIVEN, + knowledge_base_uuid: SequenceNotStr[str] | NotGiven = NOT_GIVEN, model_uuid: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, openai_key_uuid: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -756,7 +754,7 @@ async def update( project_id: str | NotGiven = NOT_GIVEN, provide_citations: bool | NotGiven = NOT_GIVEN, retrieval_method: APIRetrievalMethod | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, temperature: float | NotGiven = NOT_GIVEN, top_p: float | NotGiven = NOT_GIVEN, body_uuid: str | NotGiven = NOT_GIVEN, diff --git a/src/gradient/resources/agents/chat/completions.py b/src/gradient/resources/agents/chat/completions.py index 88d6c241..fb4523c1 100644 --- a/src/gradient/resources/agents/chat/completions.py +++ b/src/gradient/resources/agents/chat/completions.py @@ -2,12 +2,12 @@ from __future__ import annotations -from typing import Dict, List, Union, Iterable, Optional +from typing import Dict, Union, Iterable, Optional from typing_extensions import Literal, overload import httpx -from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ...._utils import required_args, maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -60,7 +60,7 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -189,8 +189,8 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, 
temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -317,8 +317,8 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -447,7 +447,7 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -549,7 +549,7 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -678,8 +678,8 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -806,8 +806,8 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -933,7 +933,7 @@ async def create( metadata: Optional[Dict[str, str]] | 
NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, diff --git a/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py b/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py index 1a73bc60..408396b1 100644 --- a/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py +++ b/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py @@ -2,11 +2,9 @@ from __future__ import annotations -from typing import List - import httpx -from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ....._utils import maybe_transform, async_maybe_transform from ....._compat import cached_property from ....._resource import SyncAPIResource, AsyncAPIResource @@ -104,7 +102,7 @@ def move( self, path_workspace_uuid: str, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, body_workspace_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -232,7 +230,7 @@ async def move( self, path_workspace_uuid: str, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, body_workspace_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. diff --git a/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py b/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py index a2cf5ebc..e6f610ef 100644 --- a/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py +++ b/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py @@ -2,8 +2,6 @@ from __future__ import annotations -from typing import List - import httpx from .agents import ( @@ -14,7 +12,7 @@ AgentsResourceWithStreamingResponse, AsyncAgentsResourceWithStreamingResponse, ) -from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ....._utils import maybe_transform, async_maybe_transform from ....._compat import cached_property from ....._resource import SyncAPIResource, AsyncAPIResource @@ -65,7 +63,7 @@ def with_streaming_response(self) -> WorkspacesResourceWithStreamingResponse: def create( self, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
@@ -327,7 +325,7 @@ def with_streaming_response(self) -> AsyncWorkspacesResourceWithStreamingRespons async def create( self, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. diff --git a/src/gradient/resources/agents/evaluation_runs.py b/src/gradient/resources/agents/evaluation_runs.py index e55cc275..e00c9eb3 100644 --- a/src/gradient/resources/agents/evaluation_runs.py +++ b/src/gradient/resources/agents/evaluation_runs.py @@ -2,11 +2,9 @@ from __future__ import annotations -from typing import List - import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -49,7 +47,7 @@ def with_streaming_response(self) -> EvaluationRunsResourceWithStreamingResponse def create( self, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, run_name: str | NotGiven = NOT_GIVEN, test_case_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -251,7 +249,7 @@ def with_streaming_response(self) -> AsyncEvaluationRunsResourceWithStreamingRes async def create( self, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, run_name: str | NotGiven = NOT_GIVEN, test_case_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
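
The List[str] → SequenceNotStr[str] swap running through these signatures widens the accepted input to any non-str sequence while still letting type checkers reject a bare string. A rough sketch of the effect, assuming the SequenceNotStr alias exported from gradient._types in this diff; collect_tags is a hypothetical helper for illustration only:

from typing import List

from gradient._types import SequenceNotStr


def collect_tags(tags: SequenceNotStr[str]) -> List[str]:
    # Any non-str sequence of strings satisfies the annotation.
    return list(tags)


collect_tags(["web", "prod"])  # ok: list, as before
collect_tags(("web", "prod"))  # ok: a tuple now type-checks too
# collect_tags("web")          # flagged by type checkers: str itself is excluded,
#                              # so a string is not silently treated as a sequence of characters
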
diff --git a/src/gradient/resources/agents/evaluation_test_cases.py b/src/gradient/resources/agents/evaluation_test_cases.py index 454576c8..07f0a251 100644 --- a/src/gradient/resources/agents/evaluation_test_cases.py +++ b/src/gradient/resources/agents/evaluation_test_cases.py @@ -2,11 +2,9 @@ from __future__ import annotations -from typing import List - import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -60,7 +58,7 @@ def create( *, dataset_uuid: str | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, - metrics: List[str] | NotGiven = NOT_GIVEN, + metrics: SequenceNotStr[str] | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, star_metric: APIStarMetricParam | NotGiven = NOT_GIVEN, workspace_uuid: str | NotGiven = NOT_GIVEN, @@ -322,7 +320,7 @@ async def create( *, dataset_uuid: str | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, - metrics: List[str] | NotGiven = NOT_GIVEN, + metrics: SequenceNotStr[str] | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, star_metric: APIStarMetricParam | NotGiven = NOT_GIVEN, workspace_uuid: str | NotGiven = NOT_GIVEN, diff --git a/src/gradient/resources/chat/completions.py b/src/gradient/resources/chat/completions.py index 3a412b10..3017deb6 100644 --- a/src/gradient/resources/chat/completions.py +++ b/src/gradient/resources/chat/completions.py @@ -2,12 +2,12 @@ from __future__ import annotations -from typing import Dict, List, Union, Iterable, Optional +from typing import Dict, Union, Iterable, Optional from typing_extensions import Literal, overload import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import required_args, maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -60,7 +60,7 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -190,8 +190,8 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -318,8 +318,8 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, 
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -445,7 +445,7 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -550,7 +550,7 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -680,8 +680,8 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -808,8 +808,8 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -935,7 +935,7 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | Literal[True] | NotGiven = 
NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, diff --git a/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py b/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py index 2f3b90cf..0d55cb48 100644 --- a/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py +++ b/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py @@ -2,11 +2,9 @@ from __future__ import annotations -from typing import List - import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -174,11 +172,11 @@ def delete_selective( self, droplet_id: int, *, - floating_ips: List[str] | NotGiven = NOT_GIVEN, - reserved_ips: List[str] | NotGiven = NOT_GIVEN, - snapshots: List[str] | NotGiven = NOT_GIVEN, - volume_snapshots: List[str] | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + floating_ips: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + reserved_ips: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + snapshots: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + volume_snapshots: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -430,11 +428,11 @@ async def delete_selective( self, droplet_id: int, *, - floating_ips: List[str] | NotGiven = NOT_GIVEN, - reserved_ips: List[str] | NotGiven = NOT_GIVEN, - snapshots: List[str] | NotGiven = NOT_GIVEN, - volume_snapshots: List[str] | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + floating_ips: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + reserved_ips: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + snapshots: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + volume_snapshots: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, diff --git a/src/gradient/resources/gpu_droplets/firewalls/tags.py b/src/gradient/resources/gpu_droplets/firewalls/tags.py index dc66c72f..ee13acd5 100644 --- a/src/gradient/resources/gpu_droplets/firewalls/tags.py +++ b/src/gradient/resources/gpu_droplets/firewalls/tags.py @@ -2,11 +2,11 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional import httpx -from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ...._utils import maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -46,7 +46,7 @@ def add( self, firewall_id: str, *, - tags: Optional[List[str]], + tags: Optional[SequenceNotStr[str]], # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -95,7 +95,7 @@ def remove( self, firewall_id: str, *, - tags: Optional[List[str]], + tags: Optional[SequenceNotStr[str]], # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -165,7 +165,7 @@ async def add( self, firewall_id: str, *, - tags: Optional[List[str]], + tags: Optional[SequenceNotStr[str]], # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -214,7 +214,7 @@ async def remove( self, firewall_id: str, *, - tags: Optional[List[str]], + tags: Optional[SequenceNotStr[str]], # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, diff --git a/src/gradient/resources/gpu_droplets/gpu_droplets.py b/src/gradient/resources/gpu_droplets/gpu_droplets.py index 0ce55ba8..48a9e5fe 100644 --- a/src/gradient/resources/gpu_droplets/gpu_droplets.py +++ b/src/gradient/resources/gpu_droplets/gpu_droplets.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, List, Union, Optional, cast +from typing import Any, Union, Optional, cast from typing_extensions import Literal, overload import httpx @@ -39,7 +39,7 @@ BackupsResourceWithStreamingResponse, AsyncBackupsResourceWithStreamingResponse, ) -from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ..._utils import required_args, maybe_transform, async_maybe_transform from ..._compat import cached_property from .autoscale import ( @@ -215,10 +215,10 @@ def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -328,7 +328,7 @@ def create( self, *, image: Union[str, int], - names: List[str], + names: SequenceNotStr[str], size: str, backup_policy: DropletBackupPolicyParam | NotGiven = NOT_GIVEN, backups: bool | NotGiven = NOT_GIVEN, @@ -336,10 +336,10 @@ def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -458,13 +458,13 @@ def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, - names: List[str] | NotGiven = NOT_GIVEN, + names: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
# The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -986,10 +986,10 @@ async def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -1099,7 +1099,7 @@ async def create( self, *, image: Union[str, int], - names: List[str], + names: SequenceNotStr[str], size: str, backup_policy: DropletBackupPolicyParam | NotGiven = NOT_GIVEN, backups: bool | NotGiven = NOT_GIVEN, @@ -1107,10 +1107,10 @@ async def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -1229,13 +1229,13 @@ async def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, - names: List[str] | NotGiven = NOT_GIVEN, + names: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, diff --git a/src/gradient/resources/gpu_droplets/images/images.py b/src/gradient/resources/gpu_droplets/images/images.py index 09994263..1b00c024 100644 --- a/src/gradient/resources/gpu_droplets/images/images.py +++ b/src/gradient/resources/gpu_droplets/images/images.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Union, Optional from typing_extensions import Literal import httpx @@ -15,7 +15,7 @@ ActionsResourceWithStreamingResponse, AsyncActionsResourceWithStreamingResponse, ) -from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ...._utils import maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -98,7 +98,7 @@ def create( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, url: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -464,7 +464,7 @@ async def create( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, url: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
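A note on the recurring change above: every parameter previously typed `List[str]` (or `List[Union[str, int]]`) is being retyped as `SequenceNotStr[...]`, imported from the SDK's private `_types` module. The definition of `SequenceNotStr` is not part of this diff, so the sketch below only illustrates the pattern such generated SDKs typically use: at type-checking time it is a `Protocol` that structurally matches list, tuple, and most other sequences but deliberately fails to match `str`, and at runtime it simply aliases `Sequence`. Treat every detail here as an assumption about, not a copy of, `gradient/_types.py`.

```python
# Illustrative sketch only -- the real SequenceNotStr lives in
# src/gradient/_types.py and may differ in the details.
from typing import (
    TYPE_CHECKING,
    Any,
    Iterator,
    Protocol,
    Sequence,
    SupportsIndex,
    TypeVar,
    overload,
)

_T_co = TypeVar("_T_co", covariant=True)

if TYPE_CHECKING:

    class SequenceNotStr(Protocol[_T_co]):
        # The trick: typeshed declares str.__contains__ as accepting only str,
        # so requiring an object-typed parameter here makes a bare string fail
        # to satisfy the protocol while list/tuple still do.
        def __contains__(self, value: object, /) -> bool: ...
        def __len__(self) -> int: ...
        def __iter__(self) -> Iterator[_T_co]: ...
        @overload
        def __getitem__(self, index: SupportsIndex, /) -> _T_co: ...
        @overload
        def __getitem__(self, index: slice, /) -> "Sequence[_T_co]": ...
        def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ...
        def count(self, value: Any, /) -> int: ...

else:
    # At runtime nothing needs to be enforced, so a plain Sequence alias keeps
    # subscripting (SequenceNotStr[str]) working without any extra machinery.
    SequenceNotStr = Sequence
```

The net effect on these method signatures is that type checkers now accept any non-string sequence of values, not only a `list`, while a bare string no longer slips through as if it were a sequence of one-character items.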
diff --git a/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py b/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py index d876b50f..8f11a5da 100644 --- a/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py +++ b/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import List, Iterable +from typing import Iterable from typing_extensions import Literal, overload import httpx @@ -15,7 +15,7 @@ DropletsResourceWithStreamingResponse, AsyncDropletsResourceWithStreamingResponse, ) -from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ...._utils import required_args, maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -122,7 +122,7 @@ def create( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -281,7 +281,7 @@ def create( size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, tag: str | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -441,7 +441,7 @@ def create( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -571,7 +571,7 @@ def update( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -725,7 +725,7 @@ def update( size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, tag: str | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -880,7 +880,7 @@ def 
update( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1134,7 +1134,7 @@ async def create( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1293,7 +1293,7 @@ async def create( size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, tag: str | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1453,7 +1453,7 @@ async def create( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1583,7 +1583,7 @@ async def update( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1737,7 +1737,7 @@ async def update( size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, tag: str | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1892,7 +1892,7 @@ async def update( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: 
Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, diff --git a/src/gradient/resources/gpu_droplets/volumes/actions.py b/src/gradient/resources/gpu_droplets/volumes/actions.py index 2e093136..c648beaa 100644 --- a/src/gradient/resources/gpu_droplets/volumes/actions.py +++ b/src/gradient/resources/gpu_droplets/volumes/actions.py @@ -2,12 +2,12 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Literal, overload import httpx -from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ...._utils import required_args, maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -184,7 +184,7 @@ def initiate_by_id( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -474,7 +474,7 @@ def initiate_by_id( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, size_gigabytes: int | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -541,7 +541,7 @@ def initiate_by_name( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -719,7 +719,7 @@ def initiate_by_name( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -909,7 +909,7 @@ async def initiate_by_id( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -1199,7 +1199,7 @@ async def initiate_by_id( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, size_gigabytes: int | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
# The extra values given here take precedence over values defined on the client or passed to this method. @@ -1266,7 +1266,7 @@ async def initiate_by_name( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -1444,7 +1444,7 @@ async def initiate_by_name( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, diff --git a/src/gradient/resources/gpu_droplets/volumes/snapshots.py b/src/gradient/resources/gpu_droplets/volumes/snapshots.py index 0f9e30fa..7e925264 100644 --- a/src/gradient/resources/gpu_droplets/volumes/snapshots.py +++ b/src/gradient/resources/gpu_droplets/volumes/snapshots.py @@ -2,11 +2,11 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional import httpx -from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ...._utils import maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -50,7 +50,7 @@ def create( volume_id: str, *, name: str, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -251,7 +251,7 @@ async def create( volume_id: str, *, name: str, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, diff --git a/src/gradient/resources/gpu_droplets/volumes/volumes.py b/src/gradient/resources/gpu_droplets/volumes/volumes.py index ada4aedf..ee980a10 100644 --- a/src/gradient/resources/gpu_droplets/volumes/volumes.py +++ b/src/gradient/resources/gpu_droplets/volumes/volumes.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Literal, overload import httpx @@ -15,7 +15,7 @@ ActionsResourceWithStreamingResponse, AsyncActionsResourceWithStreamingResponse, ) -from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ...._utils import required_args, maybe_transform, async_maybe_transform from .snapshots import ( SnapshotsResource, @@ -97,7 +97,7 @@ def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -182,7 +182,7 @@ def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -267,7 +267,7 @@ def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -590,7 +590,7 @@ async def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -675,7 +675,7 @@ async def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, @@ -760,7 +760,7 @@ async def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, diff --git a/src/gradient/resources/knowledge_bases/indexing_jobs.py b/src/gradient/resources/knowledge_bases/indexing_jobs.py index 723b42f5..41e7da76 100644 --- a/src/gradient/resources/knowledge_bases/indexing_jobs.py +++ b/src/gradient/resources/knowledge_bases/indexing_jobs.py @@ -2,11 +2,9 @@ from __future__ import annotations -from typing import List - import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -54,7 +52,7 @@ def with_streaming_response(self) -> IndexingJobsResourceWithStreamingResponse: def create( self, *, - data_source_uuids: List[str] | NotGiven = NOT_GIVEN, + data_source_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, knowledge_base_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -285,7 +283,7 @@ def with_streaming_response(self) -> AsyncIndexingJobsResourceWithStreamingRespo async def create( self, *, - data_source_uuids: List[str] | NotGiven = NOT_GIVEN, + data_source_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, knowledge_base_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. diff --git a/src/gradient/resources/knowledge_bases/knowledge_bases.py b/src/gradient/resources/knowledge_bases/knowledge_bases.py index 594b2ba7..61fc85a8 100644 --- a/src/gradient/resources/knowledge_bases/knowledge_bases.py +++ b/src/gradient/resources/knowledge_bases/knowledge_bases.py @@ -2,12 +2,12 @@ from __future__ import annotations -from typing import List, Iterable +from typing import Iterable import httpx from ...types import knowledge_base_list_params, knowledge_base_create_params, knowledge_base_update_params -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -80,7 +80,7 @@ def create( name: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
# The extra values given here take precedence over values defined on the client or passed to this method. @@ -189,7 +189,7 @@ def update( embedding_model_uuid: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, body_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -368,7 +368,7 @@ async def create( name: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -477,7 +477,7 @@ async def update( embedding_model_uuid: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, body_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. diff --git a/src/gradient/types/__init__.py b/src/gradient/types/__init__.py index d5486cba..d3d7dab0 100644 --- a/src/gradient/types/__init__.py +++ b/src/gradient/types/__init__.py @@ -151,7 +151,31 @@ # This ensures that, when building the deferred (due to cyclical references) model schema, # Pydantic can resolve the necessary references. # See: https://github.com/pydantic/pydantic/issues/11250 for more context. 
-if _compat.PYDANTIC_V2: +if _compat.PYDANTIC_V1: + api_agent.APIAgent.update_forward_refs() # type: ignore + api_workspace.APIWorkspace.update_forward_refs() # type: ignore + agent_create_response.AgentCreateResponse.update_forward_refs() # type: ignore + agent_retrieve_response.AgentRetrieveResponse.update_forward_refs() # type: ignore + agent_update_response.AgentUpdateResponse.update_forward_refs() # type: ignore + agent_delete_response.AgentDeleteResponse.update_forward_refs() # type: ignore + agent_update_status_response.AgentUpdateStatusResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspace_create_response.WorkspaceCreateResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspace_retrieve_response.WorkspaceRetrieveResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspace_update_response.WorkspaceUpdateResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspace_list_response.WorkspaceListResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspaces.agent_list_response.AgentListResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspaces.agent_move_response.AgentMoveResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.anthropic.key_list_agents_response.KeyListAgentsResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.openai.key_list_agents_response.KeyListAgentsResponse.update_forward_refs() # type: ignore + agents.function_create_response.FunctionCreateResponse.update_forward_refs() # type: ignore + agents.function_update_response.FunctionUpdateResponse.update_forward_refs() # type: ignore + agents.function_delete_response.FunctionDeleteResponse.update_forward_refs() # type: ignore + agents.api_link_knowledge_base_output.APILinkKnowledgeBaseOutput.update_forward_refs() # type: ignore + agents.knowledge_base_detach_response.KnowledgeBaseDetachResponse.update_forward_refs() # type: ignore + agents.route_view_response.RouteViewResponse.update_forward_refs() # type: ignore + models.providers.anthropic_list_agents_response.AnthropicListAgentsResponse.update_forward_refs() # type: ignore + models.providers.openai_retrieve_agents_response.OpenAIRetrieveAgentsResponse.update_forward_refs() # type: ignore +else: api_agent.APIAgent.model_rebuild(_parent_namespace_depth=0) api_workspace.APIWorkspace.model_rebuild(_parent_namespace_depth=0) agent_create_response.AgentCreateResponse.model_rebuild(_parent_namespace_depth=0) @@ -183,27 +207,3 @@ models.providers.openai_retrieve_agents_response.OpenAIRetrieveAgentsResponse.model_rebuild( _parent_namespace_depth=0 ) -else: - api_agent.APIAgent.update_forward_refs() # type: ignore - api_workspace.APIWorkspace.update_forward_refs() # type: ignore - agent_create_response.AgentCreateResponse.update_forward_refs() # type: ignore - agent_retrieve_response.AgentRetrieveResponse.update_forward_refs() # type: ignore - agent_update_response.AgentUpdateResponse.update_forward_refs() # type: ignore - agent_delete_response.AgentDeleteResponse.update_forward_refs() # type: ignore - agent_update_status_response.AgentUpdateStatusResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.workspace_create_response.WorkspaceCreateResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.workspace_retrieve_response.WorkspaceRetrieveResponse.update_forward_refs() # type: ignore - 
agents.evaluation_metrics.workspace_update_response.WorkspaceUpdateResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.workspace_list_response.WorkspaceListResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.workspaces.agent_list_response.AgentListResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.workspaces.agent_move_response.AgentMoveResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.anthropic.key_list_agents_response.KeyListAgentsResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.openai.key_list_agents_response.KeyListAgentsResponse.update_forward_refs() # type: ignore - agents.function_create_response.FunctionCreateResponse.update_forward_refs() # type: ignore - agents.function_update_response.FunctionUpdateResponse.update_forward_refs() # type: ignore - agents.function_delete_response.FunctionDeleteResponse.update_forward_refs() # type: ignore - agents.api_link_knowledge_base_output.APILinkKnowledgeBaseOutput.update_forward_refs() # type: ignore - agents.knowledge_base_detach_response.KnowledgeBaseDetachResponse.update_forward_refs() # type: ignore - agents.route_view_response.RouteViewResponse.update_forward_refs() # type: ignore - models.providers.anthropic_list_agents_response.AnthropicListAgentsResponse.update_forward_refs() # type: ignore - models.providers.openai_retrieve_agents_response.OpenAIRetrieveAgentsResponse.update_forward_refs() # type: ignore diff --git a/src/gradient/types/agent_create_params.py b/src/gradient/types/agent_create_params.py index 68ebd227..db84a258 100644 --- a/src/gradient/types/agent_create_params.py +++ b/src/gradient/types/agent_create_params.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import List from typing_extensions import Annotated, TypedDict +from .._types import SequenceNotStr from .._utils import PropertyInfo __all__ = ["AgentCreateParams"] @@ -25,7 +25,7 @@ class AgentCreateParams(TypedDict, total=False): for best practices. 
""" - knowledge_base_uuid: List[str] + knowledge_base_uuid: SequenceNotStr[str] """Ids of the knowledge base(s) to attach to the agent""" model_uuid: str @@ -43,5 +43,5 @@ class AgentCreateParams(TypedDict, total=False): region: str """The DigitalOcean region to deploy your agent in""" - tags: List[str] + tags: SequenceNotStr[str] """Agent tag to organize related resources""" diff --git a/src/gradient/types/agent_update_params.py b/src/gradient/types/agent_update_params.py index c26bf833..75c30cba 100644 --- a/src/gradient/types/agent_update_params.py +++ b/src/gradient/types/agent_update_params.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import List from typing_extensions import Annotated, TypedDict +from .._types import SequenceNotStr from .._utils import PropertyInfo from .api_retrieval_method import APIRetrievalMethod @@ -64,7 +64,7 @@ class AgentUpdateParams(TypedDict, total=False): - RETRIEVAL_METHOD_NONE: The retrieval method is none """ - tags: List[str] + tags: SequenceNotStr[str] """A set of abitrary tags to organize your agent""" temperature: float diff --git a/src/gradient/types/agents/chat/completion_create_params.py b/src/gradient/types/agents/chat/completion_create_params.py index aaec2ba5..d8cf7bc1 100644 --- a/src/gradient/types/agents/chat/completion_create_params.py +++ b/src/gradient/types/agents/chat/completion_create_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import Dict, List, Union, Iterable, Optional +from typing import Dict, Union, Iterable, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ...._types import SequenceNotStr + __all__ = [ "CompletionCreateParamsBase", "Message", @@ -96,7 +98,7 @@ class CompletionCreateParamsBase(TypedDict, total=False): far, increasing the model's likelihood to talk about new topics. """ - stop: Union[Optional[str], List[str], None] + stop: Union[Optional[str], SequenceNotStr[str], None] """Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. 
@@ -156,7 +158,7 @@ class CompletionCreateParamsBase(TypedDict, total=False): class MessageChatCompletionRequestSystemMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the system message.""" role: Required[Literal["system"]] @@ -164,7 +166,7 @@ class MessageChatCompletionRequestSystemMessage(TypedDict, total=False): class MessageChatCompletionRequestDeveloperMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the developer message.""" role: Required[Literal["developer"]] @@ -172,7 +174,7 @@ class MessageChatCompletionRequestDeveloperMessage(TypedDict, total=False): class MessageChatCompletionRequestUserMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the user message.""" role: Required[Literal["user"]] @@ -207,7 +209,7 @@ class MessageChatCompletionRequestAssistantMessage(TypedDict, total=False): role: Required[Literal["assistant"]] """The role of the messages author, in this case `assistant`.""" - content: Union[str, List[str], None] + content: Union[str, SequenceNotStr[str], None] """The contents of the assistant message.""" tool_calls: Iterable[MessageChatCompletionRequestAssistantMessageToolCall] diff --git a/src/gradient/types/agents/evaluation_metrics/workspace_create_params.py b/src/gradient/types/agents/evaluation_metrics/workspace_create_params.py index 7a418e81..443a6f43 100644 --- a/src/gradient/types/agents/evaluation_metrics/workspace_create_params.py +++ b/src/gradient/types/agents/evaluation_metrics/workspace_create_params.py @@ -2,14 +2,15 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ...._types import SequenceNotStr + __all__ = ["WorkspaceCreateParams"] class WorkspaceCreateParams(TypedDict, total=False): - agent_uuids: List[str] + agent_uuids: SequenceNotStr[str] """Ids of the agents(s) to attach to the workspace""" description: str diff --git a/src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_params.py b/src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_params.py index 74e27dd2..7b451084 100644 --- a/src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_params.py +++ b/src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_params.py @@ -2,16 +2,16 @@ from __future__ import annotations -from typing import List from typing_extensions import Annotated, TypedDict +from ....._types import SequenceNotStr from ....._utils import PropertyInfo __all__ = ["AgentMoveParams"] class AgentMoveParams(TypedDict, total=False): - agent_uuids: List[str] + agent_uuids: SequenceNotStr[str] """Agent uuids""" body_workspace_uuid: Annotated[str, PropertyInfo(alias="workspace_uuid")] diff --git a/src/gradient/types/agents/evaluation_run_create_params.py b/src/gradient/types/agents/evaluation_run_create_params.py index 3029e192..52bbee85 100644 --- a/src/gradient/types/agents/evaluation_run_create_params.py +++ b/src/gradient/types/agents/evaluation_run_create_params.py @@ -2,14 +2,15 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ..._types import SequenceNotStr + __all__ = ["EvaluationRunCreateParams"] class EvaluationRunCreateParams(TypedDict, total=False): - agent_uuids: List[str] + agent_uuids: SequenceNotStr[str] """Agent 
UUIDs to run the test case against.""" run_name: str diff --git a/src/gradient/types/agents/evaluation_test_case_create_params.py b/src/gradient/types/agents/evaluation_test_case_create_params.py index 51ce20c7..af49d024 100644 --- a/src/gradient/types/agents/evaluation_test_case_create_params.py +++ b/src/gradient/types/agents/evaluation_test_case_create_params.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ..._types import SequenceNotStr from .api_star_metric_param import APIStarMetricParam __all__ = ["EvaluationTestCaseCreateParams"] @@ -17,7 +17,7 @@ class EvaluationTestCaseCreateParams(TypedDict, total=False): description: str """Description of the test case.""" - metrics: List[str] + metrics: SequenceNotStr[str] """Full metric list to use for evaluation test case.""" name: str diff --git a/src/gradient/types/agents/evaluation_test_case_update_params.py b/src/gradient/types/agents/evaluation_test_case_update_params.py index 825f961b..d707d909 100644 --- a/src/gradient/types/agents/evaluation_test_case_update_params.py +++ b/src/gradient/types/agents/evaluation_test_case_update_params.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import List from typing_extensions import Annotated, TypedDict +from ..._types import SequenceNotStr from ..._utils import PropertyInfo from .api_star_metric_param import APIStarMetricParam @@ -30,4 +30,4 @@ class EvaluationTestCaseUpdateParams(TypedDict, total=False): class Metrics(TypedDict, total=False): - metric_uuids: List[str] + metric_uuids: SequenceNotStr[str] diff --git a/src/gradient/types/chat/completion_create_params.py b/src/gradient/types/chat/completion_create_params.py index aaec2ba5..17f00242 100644 --- a/src/gradient/types/chat/completion_create_params.py +++ b/src/gradient/types/chat/completion_create_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import Dict, List, Union, Iterable, Optional +from typing import Dict, Union, Iterable, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ..._types import SequenceNotStr + __all__ = [ "CompletionCreateParamsBase", "Message", @@ -96,7 +98,7 @@ class CompletionCreateParamsBase(TypedDict, total=False): far, increasing the model's likelihood to talk about new topics. """ - stop: Union[Optional[str], List[str], None] + stop: Union[Optional[str], SequenceNotStr[str], None] """Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. 
@@ -156,7 +158,7 @@ class CompletionCreateParamsBase(TypedDict, total=False): class MessageChatCompletionRequestSystemMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the system message.""" role: Required[Literal["system"]] @@ -164,7 +166,7 @@ class MessageChatCompletionRequestSystemMessage(TypedDict, total=False): class MessageChatCompletionRequestDeveloperMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the developer message.""" role: Required[Literal["developer"]] @@ -172,7 +174,7 @@ class MessageChatCompletionRequestDeveloperMessage(TypedDict, total=False): class MessageChatCompletionRequestUserMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the user message.""" role: Required[Literal["user"]] @@ -207,7 +209,7 @@ class MessageChatCompletionRequestAssistantMessage(TypedDict, total=False): role: Required[Literal["assistant"]] """The role of the messages author, in this case `assistant`.""" - content: Union[str, List[str], None] + content: Union[str, SequenceNotStr[str], None] """The contents of the assistant message.""" tool_calls: Iterable[MessageChatCompletionRequestAssistantMessageToolCall] diff --git a/src/gradient/types/gpu_droplet_create_params.py b/src/gradient/types/gpu_droplet_create_params.py index f38661fb..96403479 100644 --- a/src/gradient/types/gpu_droplet_create_params.py +++ b/src/gradient/types/gpu_droplet_create_params.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Union, Optional from typing_extensions import Required, TypeAlias, TypedDict +from .._types import SequenceNotStr from .droplet_backup_policy_param import DropletBackupPolicyParam __all__ = ["GPUDropletCreateParams", "DropletSingleCreate", "DropletMultiCreate"] @@ -65,14 +66,14 @@ class DropletSingleCreate(TypedDict, total=False): the Droplet may deploy in any region. """ - ssh_keys: List[Union[str, int]] + ssh_keys: SequenceNotStr[Union[str, int]] """ An array containing the IDs or fingerprints of the SSH keys that you wish to embed in the Droplet's root account upon creation. You must add the keys to your team before they can be embedded on a Droplet. Requires `ssh_key:read` scope. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to apply to the Droplet after it is created. @@ -86,7 +87,7 @@ class DropletSingleCreate(TypedDict, total=False): and may not exceed 64 KiB in size. """ - volumes: List[str] + volumes: SequenceNotStr[str] """ An array of IDs for block storage volumes that will be attached to the Droplet once created. The volumes must not already be attached to an existing Droplet. @@ -118,7 +119,7 @@ class DropletMultiCreate(TypedDict, total=False): scope. """ - names: Required[List[str]] + names: Required[SequenceNotStr[str]] """ An array of human human-readable strings you wish to use when displaying the Droplet name. Each name, if set to a domain name managed in the DigitalOcean DNS @@ -165,14 +166,14 @@ class DropletMultiCreate(TypedDict, total=False): the Droplet may deploy in any region. 
""" - ssh_keys: List[Union[str, int]] + ssh_keys: SequenceNotStr[Union[str, int]] """ An array containing the IDs or fingerprints of the SSH keys that you wish to embed in the Droplet's root account upon creation. You must add the keys to your team before they can be embedded on a Droplet. Requires `ssh_key:read` scope. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to apply to the Droplet after it is created. @@ -186,7 +187,7 @@ class DropletMultiCreate(TypedDict, total=False): and may not exceed 64 KiB in size. """ - volumes: List[str] + volumes: SequenceNotStr[str] """ An array of IDs for block storage volumes that will be attached to the Droplet once created. The volumes must not already be attached to an existing Droplet. diff --git a/src/gradient/types/gpu_droplets/autoscale_pool_droplet_template_param.py b/src/gradient/types/gpu_droplets/autoscale_pool_droplet_template_param.py index c491ed55..3eb8ac89 100644 --- a/src/gradient/types/gpu_droplets/autoscale_pool_droplet_template_param.py +++ b/src/gradient/types/gpu_droplets/autoscale_pool_droplet_template_param.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List from typing_extensions import Literal, Required, TypedDict +from ..._types import SequenceNotStr + __all__ = ["AutoscalePoolDropletTemplateParam"] @@ -38,7 +39,7 @@ class AutoscalePoolDropletTemplateParam(TypedDict, total=False): size: Required[str] """The Droplet size to be used for all Droplets in the autoscale pool.""" - ssh_keys: Required[List[str]] + ssh_keys: Required[SequenceNotStr[str]] """The SSH keys to be installed on the Droplets in the autoscale pool. You can either specify the key ID or the fingerprint. Requires `ssh_key:read` @@ -57,7 +58,7 @@ class AutoscalePoolDropletTemplateParam(TypedDict, total=False): `project:read` scope. """ - tags: List[str] + tags: SequenceNotStr[str] """ The tags to apply to each of the Droplets in the autoscale pool. Requires `tag:read` scope. diff --git a/src/gradient/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py b/src/gradient/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py index f4037b6b..9a9730e7 100644 --- a/src/gradient/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py +++ b/src/gradient/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py @@ -2,33 +2,34 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ..._types import SequenceNotStr + __all__ = ["DestroyWithAssociatedResourceDeleteSelectiveParams"] class DestroyWithAssociatedResourceDeleteSelectiveParams(TypedDict, total=False): - floating_ips: List[str] + floating_ips: SequenceNotStr[str] """ An array of unique identifiers for the floating IPs to be scheduled for deletion. """ - reserved_ips: List[str] + reserved_ips: SequenceNotStr[str] """ An array of unique identifiers for the reserved IPs to be scheduled for deletion. """ - snapshots: List[str] + snapshots: SequenceNotStr[str] """An array of unique identifiers for the snapshots to be scheduled for deletion.""" - volume_snapshots: List[str] + volume_snapshots: SequenceNotStr[str] """ An array of unique identifiers for the volume snapshots to be scheduled for deletion. 
""" - volumes: List[str] + volumes: SequenceNotStr[str] """An array of unique identifiers for the volumes to be scheduled for deletion.""" diff --git a/src/gradient/types/gpu_droplets/firewall_param.py b/src/gradient/types/gpu_droplets/firewall_param.py index 1be9cf6a..8b5a5a15 100644 --- a/src/gradient/types/gpu_droplets/firewall_param.py +++ b/src/gradient/types/gpu_droplets/firewall_param.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List, Iterable, Optional +from typing import Iterable, Optional from typing_extensions import Literal, Required, TypedDict +from ..._types import SequenceNotStr from ..shared_params.firewall_rule_target import FirewallRuleTarget __all__ = ["FirewallParam", "InboundRule", "OutboundRule"] @@ -58,7 +59,7 @@ class FirewallParam(TypedDict, total=False): outbound_rules: Optional[Iterable[OutboundRule]] - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names must exist in order to be referenced in a request. diff --git a/src/gradient/types/gpu_droplets/firewalls/tag_add_params.py b/src/gradient/types/gpu_droplets/firewalls/tag_add_params.py index 63af7640..c3b9696e 100644 --- a/src/gradient/types/gpu_droplets/firewalls/tag_add_params.py +++ b/src/gradient/types/gpu_droplets/firewalls/tag_add_params.py @@ -2,14 +2,16 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Required, TypedDict +from ...._types import SequenceNotStr + __all__ = ["TagAddParams"] class TagAddParams(TypedDict, total=False): - tags: Required[Optional[List[str]]] + tags: Required[Optional[SequenceNotStr[str]]] """A flat array of tag names as strings to be applied to the resource. Tag names must exist in order to be referenced in a request. diff --git a/src/gradient/types/gpu_droplets/firewalls/tag_remove_params.py b/src/gradient/types/gpu_droplets/firewalls/tag_remove_params.py index 91a3e382..bdd848f3 100644 --- a/src/gradient/types/gpu_droplets/firewalls/tag_remove_params.py +++ b/src/gradient/types/gpu_droplets/firewalls/tag_remove_params.py @@ -2,14 +2,16 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Required, TypedDict +from ...._types import SequenceNotStr + __all__ = ["TagRemoveParams"] class TagRemoveParams(TypedDict, total=False): - tags: Required[Optional[List[str]]] + tags: Required[Optional[SequenceNotStr[str]]] """A flat array of tag names as strings to be applied to the resource. Tag names must exist in order to be referenced in a request. diff --git a/src/gradient/types/gpu_droplets/image_create_params.py b/src/gradient/types/gpu_droplets/image_create_params.py index efbd684c..baae3bf5 100644 --- a/src/gradient/types/gpu_droplets/image_create_params.py +++ b/src/gradient/types/gpu_droplets/image_create_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Literal, TypedDict +from ..._types import SequenceNotStr + __all__ = ["ImageCreateParams"] @@ -64,7 +66,7 @@ class ImageCreateParams(TypedDict, total=False): available. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. 
diff --git a/src/gradient/types/gpu_droplets/lb_firewall_param.py b/src/gradient/types/gpu_droplets/lb_firewall_param.py index 6f1dcf10..7d54a048 100644 --- a/src/gradient/types/gpu_droplets/lb_firewall_param.py +++ b/src/gradient/types/gpu_droplets/lb_firewall_param.py @@ -2,20 +2,21 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ..._types import SequenceNotStr + __all__ = ["LbFirewallParam"] class LbFirewallParam(TypedDict, total=False): - allow: List[str] + allow: SequenceNotStr[str] """ the rules for allowing traffic to the load balancer (in the form 'ip:1.2.3.4' or 'cidr:1.2.0.0/16') """ - deny: List[str] + deny: SequenceNotStr[str] """ the rules for denying traffic to the load balancer (in the form 'ip:1.2.3.4' or 'cidr:1.2.0.0/16') diff --git a/src/gradient/types/gpu_droplets/load_balancer_create_params.py b/src/gradient/types/gpu_droplets/load_balancer_create_params.py index a87d9148..06472c78 100644 --- a/src/gradient/types/gpu_droplets/load_balancer_create_params.py +++ b/src/gradient/types/gpu_droplets/load_balancer_create_params.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List, Union, Iterable +from typing import Union, Iterable from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ..._types import SequenceNotStr from .domains_param import DomainsParam from .lb_firewall_param import LbFirewallParam from .glb_settings_param import GlbSettingsParam @@ -148,7 +149,7 @@ class AssignDropletsByID(TypedDict, total=False): sticky_sessions: StickySessionsParam """An object specifying sticky sessions settings for the load balancer.""" - target_load_balancer_ids: List[str] + target_load_balancer_ids: SequenceNotStr[str] """ An array containing the UUIDs of the Regional load balancers to be used as target backends for a Global load balancer. @@ -308,7 +309,7 @@ class AssignDropletsByTag(TypedDict, total=False): balancer. """ - target_load_balancer_ids: List[str] + target_load_balancer_ids: SequenceNotStr[str] """ An array containing the UUIDs of the Regional load balancers to be used as target backends for a Global load balancer. diff --git a/src/gradient/types/gpu_droplets/load_balancer_update_params.py b/src/gradient/types/gpu_droplets/load_balancer_update_params.py index 9a1906cb..01c2bda5 100644 --- a/src/gradient/types/gpu_droplets/load_balancer_update_params.py +++ b/src/gradient/types/gpu_droplets/load_balancer_update_params.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List, Union, Iterable +from typing import Union, Iterable from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ..._types import SequenceNotStr from .domains_param import DomainsParam from .lb_firewall_param import LbFirewallParam from .glb_settings_param import GlbSettingsParam @@ -148,7 +149,7 @@ class AssignDropletsByID(TypedDict, total=False): sticky_sessions: StickySessionsParam """An object specifying sticky sessions settings for the load balancer.""" - target_load_balancer_ids: List[str] + target_load_balancer_ids: SequenceNotStr[str] """ An array containing the UUIDs of the Regional load balancers to be used as target backends for a Global load balancer. @@ -308,7 +309,7 @@ class AssignDropletsByTag(TypedDict, total=False): balancer. """ - target_load_balancer_ids: List[str] + target_load_balancer_ids: SequenceNotStr[str] """ An array containing the UUIDs of the Regional load balancers to be used as target backends for a Global load balancer. 
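From the caller's side the retyping is backwards compatible: code that already passes lists keeps working unchanged, and other sequence types now pass static checks too. The sketch below is illustrative only; the method names follow the resource files touched earlier in this diff, the IDs are placeholders, and client construction is assumed to pick credentials up from the environment, which may differ from the actual SDK configuration.

```python
# Hypothetical usage sketch -- resource paths follow the file layout in this
# diff (gpu_droplets/firewalls/tags.py, knowledge_bases/indexing_jobs.py);
# IDs and client setup are placeholders, not values from the SDK docs.
from gradient import Gradient

client = Gradient()  # assumption: credentials are resolved from environment variables

# tags was previously List[str]; a tuple now satisfies the type checker, and
# tags="frontend" would be flagged instead of being split into characters.
client.gpu_droplets.firewalls.tags.add(
    firewall_id="fw-placeholder",
    tags=("frontend", "staging"),
)

# Lists continue to work exactly as before.
client.knowledge_bases.indexing_jobs.create(
    data_source_uuids=["ds-1", "ds-2"],
    knowledge_base_uuid="kb-placeholder",
)
```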
diff --git a/src/gradient/types/gpu_droplets/volume_create_params.py b/src/gradient/types/gpu_droplets/volume_create_params.py index fc889801..c58f7f9d 100644 --- a/src/gradient/types/gpu_droplets/volume_create_params.py +++ b/src/gradient/types/gpu_droplets/volume_create_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Union, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ..._types import SequenceNotStr + __all__ = ["VolumeCreateParams", "VolumesExt4", "VolumesXfs"] @@ -70,7 +72,7 @@ class VolumesExt4(TypedDict, total=False): snapshot_id: str """The unique identifier for the volume snapshot from which to create the volume.""" - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. @@ -141,7 +143,7 @@ class VolumesXfs(TypedDict, total=False): snapshot_id: str """The unique identifier for the volume snapshot from which to create the volume.""" - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. diff --git a/src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_params.py b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_params.py index 6d41d463..bf1869af 100644 --- a/src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_params.py +++ b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Union, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ...._types import SequenceNotStr + __all__ = ["ActionInitiateByIDParams", "VolumeActionPostAttach", "VolumeActionPostDetach", "VolumeActionPostResize"] @@ -46,7 +48,7 @@ class VolumeActionPostAttach(TypedDict, total=False): available. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. diff --git a/src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_params.py b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_params.py index d1a7d084..f37d6d9a 100644 --- a/src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_params.py +++ b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Union, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ...._types import SequenceNotStr + __all__ = ["ActionInitiateByNameParams", "VolumeActionPostAttach", "VolumeActionPostDetach"] @@ -46,7 +48,7 @@ class VolumeActionPostAttach(TypedDict, total=False): available. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. 
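Stepping back from the parameter typing: the `src/gradient/types/__init__.py` hunk earlier in this diff flips the branch condition from `if _compat.PYDANTIC_V2:` to `if _compat.PYDANTIC_V1:`, so the legacy `update_forward_refs()` calls become the special case and every newer major version, including a future Pydantic v3, falls through to `model_rebuild()` by default. The sketch below shows the same "treat v1 as the exception" pattern in isolation; it uses a local version probe instead of the SDK's internal `_compat` helpers, so consider the probe an assumption made for illustration.

```python
# Minimal sketch of the version-branching pattern; the SDK's real check is
# gradient._compat.PYDANTIC_V1, not the string probe used here.
from typing import List

import pydantic
from pydantic import BaseModel, Field

PYDANTIC_V1 = pydantic.VERSION.startswith("1.")


class Node(BaseModel):
    name: str
    # Self-referencing annotation; older Pydantic versions need an explicit
    # resolution step, and running it is harmless on newer ones.
    children: "List[Node]" = Field(default_factory=list)


if PYDANTIC_V1:
    # Pydantic v1 is the frozen legacy path: it only knows update_forward_refs().
    Node.update_forward_refs()
else:
    # Pydantic v2 resolves deferred references with model_rebuild(); by checking
    # for v1 instead of v2, a hypothetical v3 lands here as well without further
    # code changes -- the "future compat" noted in the changelog.
    Node.model_rebuild()

print(Node(name="root", children=[Node(name="leaf")]))
```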
diff --git a/src/gradient/types/gpu_droplets/volumes/snapshot_create_params.py b/src/gradient/types/gpu_droplets/volumes/snapshot_create_params.py index 8cce4a59..890dd302 100644 --- a/src/gradient/types/gpu_droplets/volumes/snapshot_create_params.py +++ b/src/gradient/types/gpu_droplets/volumes/snapshot_create_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Required, TypedDict +from ...._types import SequenceNotStr + __all__ = ["SnapshotCreateParams"] @@ -12,7 +14,7 @@ class SnapshotCreateParams(TypedDict, total=False): name: Required[str] """A human-readable name for the volume snapshot.""" - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. diff --git a/src/gradient/types/knowledge_base_create_params.py b/src/gradient/types/knowledge_base_create_params.py index 5c0df9a6..e40bd598 100644 --- a/src/gradient/types/knowledge_base_create_params.py +++ b/src/gradient/types/knowledge_base_create_params.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List, Iterable +from typing import Iterable from typing_extensions import TypedDict +from .._types import SequenceNotStr from .knowledge_bases.aws_data_source_param import AwsDataSourceParam from .knowledge_bases.api_spaces_data_source_param import APISpacesDataSourceParam from .knowledge_bases.api_file_upload_data_source_param import APIFileUploadDataSourceParam @@ -44,7 +45,7 @@ class KnowledgeBaseCreateParams(TypedDict, total=False): region: str """The datacenter region to deploy the knowledge base in.""" - tags: List[str] + tags: SequenceNotStr[str] """Tags to organize your knowledge base.""" vpc_uuid: str diff --git a/src/gradient/types/knowledge_base_update_params.py b/src/gradient/types/knowledge_base_update_params.py index 7a86b40c..cfb52016 100644 --- a/src/gradient/types/knowledge_base_update_params.py +++ b/src/gradient/types/knowledge_base_update_params.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import List from typing_extensions import Annotated, TypedDict +from .._types import SequenceNotStr from .._utils import PropertyInfo __all__ = ["KnowledgeBaseUpdateParams"] @@ -23,7 +23,7 @@ class KnowledgeBaseUpdateParams(TypedDict, total=False): project_id: str """The id of the DigitalOcean project this knowledge base will belong to""" - tags: List[str] + tags: SequenceNotStr[str] """Tags to organize your knowledge base.""" body_uuid: Annotated[str, PropertyInfo(alias="uuid")] diff --git a/src/gradient/types/knowledge_bases/indexing_job_create_params.py b/src/gradient/types/knowledge_bases/indexing_job_create_params.py index d92c5790..ebd8632b 100644 --- a/src/gradient/types/knowledge_bases/indexing_job_create_params.py +++ b/src/gradient/types/knowledge_bases/indexing_job_create_params.py @@ -2,14 +2,15 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ..._types import SequenceNotStr + __all__ = ["IndexingJobCreateParams"] class IndexingJobCreateParams(TypedDict, total=False): - data_source_uuids: List[str] + data_source_uuids: SequenceNotStr[str] """ List of data source ids to index, if none are provided, all data sources will be indexed diff --git a/src/gradient/types/shared_params/firewall_rule_target.py b/src/gradient/types/shared_params/firewall_rule_target.py index 49a5f75c..7f317f6c 
100644 --- a/src/gradient/types/shared_params/firewall_rule_target.py +++ b/src/gradient/types/shared_params/firewall_rule_target.py @@ -2,14 +2,16 @@ from __future__ import annotations -from typing import List, Iterable, Optional +from typing import Iterable, Optional from typing_extensions import TypedDict +from ..._types import SequenceNotStr + __all__ = ["FirewallRuleTarget"] class FirewallRuleTarget(TypedDict, total=False): - addresses: List[str] + addresses: SequenceNotStr[str] """ An array of strings containing the IPv4 addresses, IPv6 addresses, IPv4 CIDRs, and/or IPv6 CIDRs to which the firewall will allow traffic. @@ -21,19 +23,19 @@ class FirewallRuleTarget(TypedDict, total=False): traffic. """ - kubernetes_ids: List[str] + kubernetes_ids: SequenceNotStr[str] """ An array containing the IDs of the Kubernetes clusters to which the firewall will allow traffic. """ - load_balancer_uids: List[str] + load_balancer_uids: SequenceNotStr[str] """ An array containing the IDs of the load balancers to which the firewall will allow traffic. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names must exist in order to be referenced in a request. diff --git a/tests/test_client.py b/tests/test_client.py index 9422604d..98833ff2 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -6,13 +6,10 @@ import os import sys import json -import time import asyncio import inspect -import subprocess import tracemalloc from typing import Any, Union, cast -from textwrap import dedent from unittest import mock from typing_extensions import Literal @@ -23,6 +20,7 @@ from gradient import Gradient, AsyncGradient, APIResponseValidationError from gradient._types import Omit +from gradient._utils import asyncify from gradient._models import BaseModel, FinalRequestOptions from gradient._streaming import Stream, AsyncStream from gradient._exceptions import ( @@ -34,8 +32,10 @@ DEFAULT_TIMEOUT, HTTPX_DEFAULT_TIMEOUT, BaseClient, + OtherPlatform, DefaultHttpxClient, DefaultAsyncHttpxClient, + get_platform, make_request_options, ) @@ -2058,52 +2058,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert response.http_request.headers.get("x-stainless-retry-count") == "42" - def test_get_platform(self) -> None: - # A previous implementation of asyncify could leave threads unterminated when - # used with nest_asyncio. - # - # Since nest_asyncio.apply() is global and cannot be un-applied, this - # test is run in a separate process to avoid affecting other tests. 
- test_code = dedent( - """ - import asyncio - import nest_asyncio - import threading - - from gradient._utils import asyncify - from gradient._base_client import get_platform - - async def test_main() -> None: - result = await asyncify(get_platform)() - print(result) - for thread in threading.enumerate(): - print(thread.name) - - nest_asyncio.apply() - asyncio.run(test_main()) - """ - ) - with subprocess.Popen( - [sys.executable, "-c", test_code], - text=True, - ) as process: - timeout = 10 # seconds - - start_time = time.monotonic() - while True: - return_code = process.poll() - if return_code is not None: - if return_code != 0: - raise AssertionError("calling get_platform using asyncify resulted in a non-zero exit code") - - # success - break - - if time.monotonic() - start_time > timeout: - process.kill() - raise AssertionError("calling get_platform using asyncify resulted in a hung process") - - time.sleep(0.1) + async def test_get_platform(self) -> None: + platform = await asyncify(get_platform)() + assert isinstance(platform, (str, OtherPlatform)) async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: # Test that the proxy environment variables are set correctly diff --git a/tests/test_models.py b/tests/test_models.py index 9a2ee908..de5ef465 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -8,7 +8,7 @@ from pydantic import Field from gradient._utils import PropertyInfo -from gradient._compat import PYDANTIC_V2, parse_obj, model_dump, model_json +from gradient._compat import PYDANTIC_V1, parse_obj, model_dump, model_json from gradient._models import BaseModel, construct_type @@ -294,12 +294,12 @@ class Model(BaseModel): assert cast(bool, m.foo) is True m = Model.construct(foo={"name": 3}) - if PYDANTIC_V2: - assert isinstance(m.foo, Submodel1) - assert m.foo.name == 3 # type: ignore - else: + if PYDANTIC_V1: assert isinstance(m.foo, Submodel2) assert m.foo.name == "3" + else: + assert isinstance(m.foo, Submodel1) + assert m.foo.name == 3 # type: ignore def test_list_of_unions() -> None: @@ -426,10 +426,10 @@ class Model(BaseModel): expected = datetime(2019, 12, 27, 18, 11, 19, 117000, tzinfo=timezone.utc) - if PYDANTIC_V2: - expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}' - else: + if PYDANTIC_V1: expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}' + else: + expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}' model = Model.construct(created_at="2019-12-27T18:11:19.117Z") assert model.created_at == expected @@ -531,7 +531,7 @@ class Model2(BaseModel): assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)} assert m4.to_dict(mode="json") == {"created_at": time_str} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): m.to_dict(warnings=False) @@ -556,7 +556,7 @@ class Model(BaseModel): assert m3.model_dump() == {"foo": None} assert m3.model_dump(exclude_none=True) == {} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): m.model_dump(round_trip=True) @@ -580,10 +580,10 @@ class Model(BaseModel): assert json.loads(m.to_json()) == {"FOO": "hello"} assert json.loads(m.to_json(use_api_names=False)) == {"foo": "hello"} - if PYDANTIC_V2: - assert m.to_json(indent=None) == '{"FOO":"hello"}' - else: + if PYDANTIC_V1: assert m.to_json(indent=None) == '{"FOO": "hello"}' + else: + assert m.to_json(indent=None) == '{"FOO":"hello"}' m2 
= Model() assert json.loads(m2.to_json()) == {} @@ -595,7 +595,7 @@ class Model(BaseModel): assert json.loads(m3.to_json()) == {"FOO": None} assert json.loads(m3.to_json(exclude_none=True)) == {} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): m.to_json(warnings=False) @@ -622,7 +622,7 @@ class Model(BaseModel): assert json.loads(m3.model_dump_json()) == {"foo": None} assert json.loads(m3.model_dump_json(exclude_none=True)) == {} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): m.model_dump_json(round_trip=True) @@ -679,12 +679,12 @@ class B(BaseModel): ) assert isinstance(m, A) assert m.type == "a" - if PYDANTIC_V2: - assert m.data == 100 # type: ignore[comparison-overlap] - else: + if PYDANTIC_V1: # pydantic v1 automatically converts inputs to strings # if the expected type is a str assert m.data == "100" + else: + assert m.data == 100 # type: ignore[comparison-overlap] def test_discriminated_unions_unknown_variant() -> None: @@ -768,12 +768,12 @@ class B(BaseModel): ) assert isinstance(m, A) assert m.foo_type == "a" - if PYDANTIC_V2: - assert m.data == 100 # type: ignore[comparison-overlap] - else: + if PYDANTIC_V1: # pydantic v1 automatically converts inputs to strings # if the expected type is a str assert m.data == "100" + else: + assert m.data == 100 # type: ignore[comparison-overlap] def test_discriminated_unions_overlapping_discriminators_invalid_data() -> None: @@ -833,7 +833,7 @@ class B(BaseModel): assert UnionType.__discriminator__ is discriminator -@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1") +@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") def test_type_alias_type() -> None: Alias = TypeAliasType("Alias", str) # pyright: ignore @@ -849,7 +849,7 @@ class Model(BaseModel): assert m.union == "bar" -@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1") +@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") def test_field_named_cls() -> None: class Model(BaseModel): cls: str @@ -936,7 +936,7 @@ class Type2(BaseModel): assert isinstance(model.value, InnerType2) -@pytest.mark.skipif(not PYDANTIC_V2, reason="this is only supported in pydantic v2 for now") +@pytest.mark.skipif(PYDANTIC_V1, reason="this is only supported in pydantic v2 for now") def test_extra_properties() -> None: class Item(BaseModel): prop: int diff --git a/tests/test_transform.py b/tests/test_transform.py index 552462fa..db909f25 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -15,7 +15,7 @@ parse_datetime, async_transform as _async_transform, ) -from gradient._compat import PYDANTIC_V2 +from gradient._compat import PYDANTIC_V1 from gradient._models import BaseModel _T = TypeVar("_T") @@ -189,7 +189,7 @@ class DateModel(BaseModel): @pytest.mark.asyncio async def test_iso8601_format(use_async: bool) -> None: dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") - tz = "Z" if PYDANTIC_V2 else "+00:00" + tz = "+00:00" if PYDANTIC_V1 else "Z" assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap] assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692" + tz} # type: ignore[comparison-overlap] @@ -297,11 +297,11 @@ async def 
test_pydantic_unknown_field(use_async: bool) -> None: @pytest.mark.asyncio async def test_pydantic_mismatched_types(use_async: bool) -> None: model = MyModel.construct(foo=True) - if PYDANTIC_V2: + if PYDANTIC_V1: + params = await transform(model, Any, use_async) + else: with pytest.warns(UserWarning): params = await transform(model, Any, use_async) - else: - params = await transform(model, Any, use_async) assert cast(Any, params) == {"foo": True} @@ -309,11 +309,11 @@ async def test_pydantic_mismatched_types(use_async: bool) -> None: @pytest.mark.asyncio async def test_pydantic_mismatched_object_type(use_async: bool) -> None: model = MyModel.construct(foo=MyModel.construct(hello="world")) - if PYDANTIC_V2: + if PYDANTIC_V1: + params = await transform(model, Any, use_async) + else: with pytest.warns(UserWarning): params = await transform(model, Any, use_async) - else: - params = await transform(model, Any, use_async) assert cast(Any, params) == {"foo": {"hello": "world"}} diff --git a/tests/test_utils/test_datetime_parse.py b/tests/test_utils/test_datetime_parse.py new file mode 100644 index 00000000..6cbb1b6f --- /dev/null +++ b/tests/test_utils/test_datetime_parse.py @@ -0,0 +1,110 @@ +""" +Copied from https://github.com/pydantic/pydantic/blob/v1.10.22/tests/test_datetime_parse.py +with modifications so it works without pydantic v1 imports. +""" + +from typing import Type, Union +from datetime import date, datetime, timezone, timedelta + +import pytest + +from gradient._utils import parse_date, parse_datetime + + +def create_tz(minutes: int) -> timezone: + return timezone(timedelta(minutes=minutes)) + + +@pytest.mark.parametrize( + "value,result", + [ + # Valid inputs + ("1494012444.883309", date(2017, 5, 5)), + (b"1494012444.883309", date(2017, 5, 5)), + (1_494_012_444.883_309, date(2017, 5, 5)), + ("1494012444", date(2017, 5, 5)), + (1_494_012_444, date(2017, 5, 5)), + (0, date(1970, 1, 1)), + ("2012-04-23", date(2012, 4, 23)), + (b"2012-04-23", date(2012, 4, 23)), + ("2012-4-9", date(2012, 4, 9)), + (date(2012, 4, 9), date(2012, 4, 9)), + (datetime(2012, 4, 9, 12, 15), date(2012, 4, 9)), + # Invalid inputs + ("x20120423", ValueError), + ("2012-04-56", ValueError), + (19_999_999_999, date(2603, 10, 11)), # just before watershed + (20_000_000_001, date(1970, 8, 20)), # just after watershed + (1_549_316_052, date(2019, 2, 4)), # nowish in s + (1_549_316_052_104, date(2019, 2, 4)), # nowish in ms + (1_549_316_052_104_324, date(2019, 2, 4)), # nowish in μs + (1_549_316_052_104_324_096, date(2019, 2, 4)), # nowish in ns + ("infinity", date(9999, 12, 31)), + ("inf", date(9999, 12, 31)), + (float("inf"), date(9999, 12, 31)), + ("infinity ", date(9999, 12, 31)), + (int("1" + "0" * 100), date(9999, 12, 31)), + (1e1000, date(9999, 12, 31)), + ("-infinity", date(1, 1, 1)), + ("-inf", date(1, 1, 1)), + ("nan", ValueError), + ], +) +def test_date_parsing(value: Union[str, bytes, int, float], result: Union[date, Type[Exception]]) -> None: + if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance] + with pytest.raises(result): + parse_date(value) + else: + assert parse_date(value) == result + + +@pytest.mark.parametrize( + "value,result", + [ + # Valid inputs + # values in seconds + ("1494012444.883309", datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), + (1_494_012_444.883_309, datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), + ("1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + (b"1494012444", 
datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + (1_494_012_444, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + # values in ms + ("1494012444000.883309", datetime(2017, 5, 5, 19, 27, 24, 883, tzinfo=timezone.utc)), + ("-1494012444000.883309", datetime(1922, 8, 29, 4, 32, 35, 999117, tzinfo=timezone.utc)), + (1_494_012_444_000, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + ("2012-04-23T09:15:00", datetime(2012, 4, 23, 9, 15)), + ("2012-4-9 4:8:16", datetime(2012, 4, 9, 4, 8, 16)), + ("2012-04-23T09:15:00Z", datetime(2012, 4, 23, 9, 15, 0, 0, timezone.utc)), + ("2012-4-9 4:8:16-0320", datetime(2012, 4, 9, 4, 8, 16, 0, create_tz(-200))), + ("2012-04-23T10:20:30.400+02:30", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(150))), + ("2012-04-23T10:20:30.400+02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(120))), + ("2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), + (b"2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), + (datetime(2017, 5, 5), datetime(2017, 5, 5)), + (0, datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)), + # Invalid inputs + ("x20120423091500", ValueError), + ("2012-04-56T09:15:90", ValueError), + ("2012-04-23T11:05:00-25:00", ValueError), + (19_999_999_999, datetime(2603, 10, 11, 11, 33, 19, tzinfo=timezone.utc)), # just before watershed + (20_000_000_001, datetime(1970, 8, 20, 11, 33, 20, 1000, tzinfo=timezone.utc)), # just after watershed + (1_549_316_052, datetime(2019, 2, 4, 21, 34, 12, 0, tzinfo=timezone.utc)), # nowish in s + (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, tzinfo=timezone.utc)), # nowish in ms + (1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in μs + (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in ns + ("infinity", datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("inf", datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("inf ", datetime(9999, 12, 31, 23, 59, 59, 999999)), + (1e50, datetime(9999, 12, 31, 23, 59, 59, 999999)), + (float("inf"), datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("-infinity", datetime(1, 1, 1, 0, 0)), + ("-inf", datetime(1, 1, 1, 0, 0)), + ("nan", ValueError), + ], +) +def test_datetime_parsing(value: Union[str, bytes, int, float], result: Union[datetime, Type[Exception]]) -> None: + if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance] + with pytest.raises(result): + parse_datetime(value) + else: + assert parse_datetime(value) == result diff --git a/tests/utils.py b/tests/utils.py index ac014538..8d9112d6 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -19,7 +19,7 @@ is_annotated_type, is_type_alias_type, ) -from gradient._compat import PYDANTIC_V2, field_outer_type, get_model_fields +from gradient._compat import PYDANTIC_V1, field_outer_type, get_model_fields from gradient._models import BaseModel BaseModelT = TypeVar("BaseModelT", bound=BaseModel) @@ -28,12 +28,12 @@ def assert_matches_model(model: type[BaseModelT], value: BaseModelT, *, path: list[str]) -> bool: for name, field in get_model_fields(model).items(): field_value = getattr(value, name) - if PYDANTIC_V2: - allow_none = False - else: + if PYDANTIC_V1: # in v1 nullability was structured differently # https://docs.pydantic.dev/2.0/migration/#required-optional-and-nullable-fields allow_none = getattr(field, "allow_none", False) + else: + allow_none = False 
assert_matches_type( field_outer_type(field),