from typing import List, Optional

from httpx import AsyncClient, Headers, HTTPStatusError, QueryParams, Response
from pydantic import ValidationError
from yarl import URL

from ..exceptions import StorageApiError, VectorBucketErrorMessage
from ..types import (
    JSON,
    AnalyticsBucket,
    AnalyticsBucketDeleteResponse,
    AnalyticsBucketsParser,
    RequestMethod,
    SortColumn,
    SortOrder,
)


class AsyncRequestBuilder:
    """Builds and sends storage API requests, translating HTTP errors into
    StorageApiError."""

    def __init__(self, session: AsyncClient, base_url: URL, headers: Headers) -> None:
        self._session = session
        self._base_url = base_url
        self.headers = headers

    async def send(
        self,
        http_method: RequestMethod,
        path: list[str],
        body: JSON = None,
        query_params: Optional[QueryParams] = None,
    ) -> Response:
        """Issue *http_method* against the base URL joined with *path* segments.

        Returns the raw Response on 2xx. On an HTTP error status, tries to
        parse the API's error envelope; raises StorageApiError either way
        (with a generic "LibraryError" code when the envelope is unparsable).
        """
        response = await self._session.request(
            method=http_method,
            json=body,
            url=str(self._base_url.joinpath(*path)),
            headers=self.headers,
            params=query_params or QueryParams(),
        )
        try:
            response.raise_for_status()
            return response
        except HTTPStatusError as exc:
            try:
                error = VectorBucketErrorMessage.model_validate_json(response.content)
                # Parsed envelope: surface the server-provided details.
                raise StorageApiError(
                    message=error.message,
                    code=error.code or "400",
                    status=error.statusCode,
                ) from exc
            except ValidationError as exc:
                # Body did not match the error envelope; report a library-level error.
                raise StorageApiError(
                    message="The request failed, but could not parse error message response.",
                    code="LibraryError",
                    status=response.status_code,
                ) from exc


class AsyncStorageAnalyticsClient:
    """Async client for the storage Analytics Buckets API."""

    def __init__(self, request: AsyncRequestBuilder) -> None:
        self._request = request

    async def create(self, bucket_name: str) -> AnalyticsBucket:
        """Create a new analytics bucket named *bucket_name*."""
        body = {"name": bucket_name}
        data = await self._request.send(http_method="POST", path=["bucket"], body=body)
        # Fix: response.content is raw JSON bytes; `model_validate` expects an
        # already-parsed mapping, so parse with `model_validate_json` (matching
        # `AnalyticsBucketsParser.validate_json` used by `list`).
        return AnalyticsBucket.model_validate_json(data.content)

    async def list(
        self,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        sort_column: Optional[SortColumn] = None,
        sort_order: Optional[SortOrder] = None,
        search: Optional[str] = None,
    ) -> List[AnalyticsBucket]:
        """List analytics buckets, optionally paginated, sorted and filtered."""
        params = QueryParams(
            limit=limit,
            offset=offset,
            sort_column=sort_column,
            sort_order=sort_order,
            search=search,
        )
        data = await self._request.send(
            http_method="GET", path=["bucket"], query_params=params
        )
        return AnalyticsBucketsParser.validate_json(data.content)

    async def delete(self, bucket_name: str) -> AnalyticsBucketDeleteResponse:
        """Delete the analytics bucket named *bucket_name*."""
        data = await self._request.send(
            http_method="DELETE", path=["bucket", bucket_name]
        )
        # Fix: parse JSON bytes (see `create`).
        return AnalyticsBucketDeleteResponse.model_validate_json(data.content)
# used to not send non-required values as `null`
# for they cannot be null
def remove_none(**kwargs: JSON) -> JSON:
    """Return kwargs as a dict with every None-valued key dropped."""
    return {key: val for key, val in kwargs.items() if val is not None}


class AsyncVectorBucketScope:
    """Index-management operations scoped to a single vector bucket."""

    def __init__(self, request: AsyncRequestBuilder, bucket_name: str) -> None:
        self._request = request
        self._bucket_name = bucket_name

    def with_metadata(self, **data: JSON) -> JSON:
        """Build a request body carrying this bucket's name, None values omitted."""
        return remove_none(vectorBucketName=self._bucket_name, **data)

    async def create_index(
        self,
        index_name: str,
        dimension: int,
        distance_metric: DistanceMetric,
        data_type: str,
        metadata: Optional[MetadataConfiguration] = None,
    ) -> None:
        """Create index *index_name* in this bucket with the given geometry."""
        body = self.with_metadata(
            indexName=index_name,
            dimension=dimension,
            distanceMetric=distance_metric,
            dataType=data_type,
            metadataConfiguration=dict(metadata) if metadata else None,
        )
        await self._request.send(http_method="POST", path=["CreateIndex"], body=body)

    async def get_index(self, index_name: str) -> Optional[VectorIndex]:
        """Fetch *index_name*, or None when the API reports an error."""
        body = self.with_metadata(indexName=index_name)
        try:
            data = await self._request.send(
                http_method="POST", path=["GetIndex"], body=body
            )
            return GetVectorIndexResponse.model_validate_json(data.content).index
        except StorageApiError:
            return None

    async def list_indexes(
        self,
        next_token: Optional[str] = None,
        max_results: Optional[int] = None,
        prefix: Optional[str] = None,
    ) -> ListVectorIndexesResponse:
        """List indexes in this bucket, paginated via nextToken/maxResults.

        Fix: the body previously used snake_case keys (`next_token`,
        `max_results`), which the API does not recognize — every other request
        body in this module uses camelCase — so pagination was silently broken.
        """
        body = self.with_metadata(
            nextToken=next_token, maxResults=max_results, prefix=prefix
        )
        data = await self._request.send(
            http_method="POST", path=["ListIndexes"], body=body
        )
        return ListVectorIndexesResponse.model_validate_json(data.content)

    async def delete_index(self, index_name: str) -> None:
        """Delete index *index_name* from this bucket."""
        body = self.with_metadata(indexName=index_name)
        await self._request.send(http_method="POST", path=["DeleteIndex"], body=body)

    def index(self, index_name: str) -> AsyncVectorIndexScope:
        """Scope further vector operations to *index_name*."""
        return AsyncVectorIndexScope(self._request, self._bucket_name, index_name)
class AsyncVectorIndexScope:
    """Vector CRUD/query operations scoped to one index of one bucket."""

    def __init__(
        self, request: AsyncRequestBuilder, bucket_name: str, index_name: str
    ) -> None:
        self._request = request
        self._bucket_name = bucket_name
        self._index_name = index_name

    def with_metadata(self, **data: JSON) -> JSON:
        """Request body carrying bucket + index names, None values omitted."""
        payload = {
            "vectorBucketName": self._bucket_name,
            "indexName": self._index_name,
            **data,
        }
        return {key: val for key, val in payload.items() if val is not None}

    async def put(self, vectors: List[VectorObject]) -> None:
        """Upsert *vectors* into this index."""
        body = self.with_metadata(vectors=[v.as_json() for v in vectors])
        await self._request.send(http_method="POST", path=["PutVectors"], body=body)

    async def get(
        self, *keys: str, return_data: bool = True, return_metadata: bool = True
    ) -> List[VectorMatch]:
        """Fetch vectors by key, optionally including data and metadata."""
        body = self.with_metadata(
            keys=keys, returnData=return_data, returnMetadata=return_metadata
        )
        data = await self._request.send(
            http_method="POST", path=["GetVectors"], body=body
        )
        return GetVectorsResponse.model_validate_json(data.content).vectors

    async def list(
        self,
        max_results: Optional[int] = None,
        next_token: Optional[str] = None,
        return_data: bool = True,
        return_metadata: bool = True,
        segment_count: Optional[int] = None,
        segment_index: Optional[int] = None,
    ) -> ListVectorsResponse:
        """Page through the vectors in this index."""
        body = self.with_metadata(
            maxResults=max_results,
            nextToken=next_token,
            returnData=return_data,
            returnMetadata=return_metadata,
            segmentCount=segment_count,
            segmentIndex=segment_index,
        )
        data = await self._request.send(
            http_method="POST", path=["ListVectors"], body=body
        )
        return ListVectorsResponse.model_validate_json(data.content)

    async def query(
        self,
        query_vector: VectorData,
        topK: Optional[int] = None,
        filter: Optional[VectorFilter] = None,
        return_distance: bool = True,
        return_metadata: bool = True,
    ) -> QueryVectorsResponse:
        """Similarity-search the index with *query_vector*."""
        body = self.with_metadata(
            queryVector=dict(query_vector),
            topK=topK,
            filter=filter,
            returnDistance=return_distance,
            returnMetadata=return_metadata,
        )
        data = await self._request.send(
            http_method="POST", path=["QueryVectors"], body=body
        )
        return QueryVectorsResponse.model_validate_json(data.content)

    async def delete(self, keys: List[str]) -> None:
        """Delete up to 500 vectors by key.

        Raises VectorBucketException unless 1 <= len(keys) <= 500.
        Fix: the previous guard (`1 < len(keys) or len(keys) > 500`) rejected
        every batch of two or more keys and accepted an empty batch.
        """
        if not 1 <= len(keys) <= 500:
            raise VectorBucketException("Keys batch size must be between 1 and 500.")
        body = self.with_metadata(keys=keys)
        await self._request.send(
            http_method="POST", path=["DeleteVectors"], body=body
        )


class AsyncStorageVectorsClient:
    """Entry point for the vector buckets API."""

    def __init__(self, url: URL, headers: Headers, session: AsyncClient) -> None:
        self._request = AsyncRequestBuilder(session, base_url=URL(url), headers=headers)

    def from_(self, bucket_name: str) -> AsyncVectorBucketScope:
        """Scope operations to the bucket named *bucket_name*."""
        return AsyncVectorBucketScope(self._request, bucket_name)

    async def create_bucket(self, bucket_name: str) -> None:
        """Create a vector bucket named *bucket_name*."""
        body = {"vectorBucketName": bucket_name}
        await self._request.send(
            http_method="POST", path=["CreateVectorBucket"], body=body
        )

    async def get_bucket(self, bucket_name: str) -> Optional[VectorBucket]:
        """Fetch a vector bucket, or None when the API reports an error."""
        body = {"vectorBucketName": bucket_name}
        try:
            data = await self._request.send(
                http_method="POST", path=["GetVectorBucket"], body=body
            )
            return GetVectorBucketResponse.model_validate_json(
                data.content
            ).vectorBucket
        except StorageApiError:
            return None

    async def list_buckets(
        self,
        prefix: Optional[str] = None,
        max_results: Optional[int] = None,
        next_token: Optional[str] = None,
    ) -> ListVectorBucketsResponse:
        """List vector buckets, paginated via nextToken/maxResults."""
        body = remove_none(prefix=prefix, maxResults=max_results, nextToken=next_token)
        data = await self._request.send(
            http_method="POST", path=["ListVectorBuckets"], body=body
        )
        return ListVectorBucketsResponse.model_validate_json(data.content)

    async def delete_bucket(self, bucket_name: str) -> None:
        """Delete the vector bucket named *bucket_name*."""
        body = {"vectorBucketName": bucket_name}
        await self._request.send(
            http_method="POST", path=["DeleteVectorBucket"], body=body
        )
class SyncStorageAnalyticsClient:
    """Sync client for the storage Analytics Buckets API (mirrors
    AsyncStorageAnalyticsClient)."""

    def __init__(self, request: RequestBuilder) -> None:
        self._request = request

    def create(self, bucket_name: str) -> AnalyticsBucket:
        """Create a new analytics bucket named *bucket_name*."""
        body = {"name": bucket_name}
        data = self._request.send(http_method="POST", path=["bucket"], body=body)
        # Fix: response.content is raw JSON bytes; `model_validate` expects an
        # already-parsed mapping — parse with `model_validate_json` instead
        # (matching `AnalyticsBucketsParser.validate_json` used by `list`).
        return AnalyticsBucket.model_validate_json(data.content)

    def list(
        self,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        sort_column: Optional[SortColumn] = None,
        sort_order: Optional[SortOrder] = None,
        search: Optional[str] = None,
    ) -> List[AnalyticsBucket]:
        """List analytics buckets, optionally paginated, sorted and filtered."""
        params = QueryParams(
            limit=limit,
            offset=offset,
            sort_column=sort_column,
            sort_order=sort_order,
            search=search,
        )
        data = self._request.send(
            http_method="GET", path=["bucket"], query_params=params
        )
        return AnalyticsBucketsParser.validate_json(data.content)

    def delete(self, bucket_name: str) -> AnalyticsBucketDeleteResponse:
        """Delete the analytics bucket named *bucket_name*."""
        data = self._request.send(http_method="DELETE", path=["bucket", bucket_name])
        # Fix: parse JSON bytes (see `create`).
        return AnalyticsBucketDeleteResponse.model_validate_json(data.content)


# NOTE(review): method of SyncStorageClient (storage3/_sync/client.py), shown
# unindented here because the enclosing class body is outside this hunk.
def vectors(self) -> SyncStorageVectorsClient:
    """Scoped client for the vector buckets API.

    Fix: join the "v1/vector" path segments onto the base URL so the sync
    client targets the same endpoint as AsyncStorageClient.vectors; it
    previously passed the bare storage base URL, sending vector requests to
    the storage root.
    """
    return SyncStorageVectorsClient(
        url=self._base_url.joinpath("v1", "vector"),
        headers=self._headers,
        session=self.session,
    )
class RequestBuilder:
    """Builds and sends storage API requests for the sync client.

    NOTE(review): unlike AsyncRequestBuilder, this does not call
    raise_for_status() or translate failures into StorageApiError — callers
    receive the raw Response even on 4xx/5xx. Confirm whether the sync path
    should mirror the async error handling (run-unasync would generate it).
    """

    def __init__(self, session: Client, base_url: URL, headers: Headers) -> None:
        self._session = session
        self._base_url = base_url
        self.headers = headers

    def send(
        self,
        http_method: RequestMethod,
        path: list[str],
        body: JSON = None,
        query_params: Optional[QueryParams] = None,
    ) -> Response:
        """Issue *http_method* against the base URL joined with *path* segments."""
        return self._session.request(
            method=http_method,
            json=body,
            url=str(self._base_url.joinpath(*path)),
            headers=self.headers,
            params=query_params or QueryParams(),
        )


class SyncVectorBucketScope:
    """Index-management operations scoped to a single vector bucket."""

    def __init__(self, request: RequestBuilder, bucket_name: str) -> None:
        self._request = request
        self._bucket_name = bucket_name

    def with_metadata(self, **data: JSON) -> JSON:
        """Request body carrying this bucket's name, None values omitted.

        Fix: drop None values as the async client does — optional fields must
        be omitted from the request body, not sent as JSON null.
        """
        payload = {"vectorBucketName": self._bucket_name, **data}
        return {key: val for key, val in payload.items() if val is not None}

    def create_index(
        self,
        index_name: str,
        dimension: int,
        distance_metric: DistanceMetric,
        data_type: str,
        metadata: Optional[MetadataConfiguration] = None,
    ) -> None:
        """Create index *index_name* in this bucket.

        Fix: the previous signature omitted *index_name*, so no `indexName`
        was ever sent and every CreateIndex call was malformed; this now
        mirrors AsyncVectorBucketScope.create_index.
        """
        body = self.with_metadata(
            indexName=index_name,
            dimension=dimension,
            distanceMetric=distance_metric,
            dataType=data_type,
            metadataConfiguration=dict(metadata) if metadata else None,
        )
        self._request.send(http_method="POST", path=["CreateIndex"], body=body)

    def get_index(self, index_name: str) -> VectorIndex:
        """Fetch index *index_name*."""
        body = self.with_metadata(indexName=index_name)
        data = self._request.send(http_method="POST", path=["GetIndex"], body=body)
        # Fix: the response is JSON bytes shaped {"index": {...}} — parse the
        # envelope with model_validate_json and unwrap, as the async client does.
        return GetVectorIndexResponse.model_validate_json(data.content).index

    def list_indexes(
        self,
        next_token: Optional[str] = None,
        max_results: Optional[int] = None,
        prefix: Optional[str] = None,
    ) -> ListVectorIndexesResponse:
        """List indexes in this bucket, paginated via nextToken/maxResults.

        Fix: camelCase body keys (the snake_case `next_token`/`max_results`
        previously sent are not recognized by the API) and JSON-bytes parsing.
        """
        body = self.with_metadata(
            nextToken=next_token, maxResults=max_results, prefix=prefix
        )
        data = self._request.send(http_method="POST", path=["ListIndexes"], body=body)
        return ListVectorIndexesResponse.model_validate_json(data.content)

    def delete_index(self, index_name: str) -> None:
        """Delete index *index_name* from this bucket."""
        body = self.with_metadata(indexName=index_name)
        self._request.send(http_method="POST", path=["DeleteIndex"], body=body)

    def index(self, index_name: str) -> SyncVectorIndexScope:
        """Scope further vector operations to *index_name*."""
        return SyncVectorIndexScope(self._request, self._bucket_name, index_name)


class SyncVectorIndexScope:
    """Vector CRUD/query operations scoped to one index of one bucket."""

    def __init__(
        self, request: RequestBuilder, bucket_name: str, index_name: str
    ) -> None:
        self._request = request
        self._bucket_name = bucket_name
        self._index_name = index_name

    def with_metadata(self, **data: JSON) -> JSON:
        """Request body carrying bucket + index names, None values omitted.

        Fix: drop None values to match the async client's remove_none behavior.
        """
        payload = {
            "vectorBucketName": self._bucket_name,
            "indexName": self._index_name,
            **data,
        }
        return {key: val for key, val in payload.items() if val is not None}

    def put(self, vectors: List[VectorObject]) -> None:
        """Upsert *vectors* into this index.

        Fix: serialize via VectorObject.as_json() — `dict(v)` left a VectorData
        model inside the payload, which httpx's JSON encoder cannot serialize.
        """
        body = self.with_metadata(vectors=[v.as_json() for v in vectors])
        self._request.send(http_method="POST", path=["PutVectors"], body=body)

    def get(
        self, *keys: str, return_data: bool = True, return_metadata: bool = True
    ) -> GetVectorsResponse:
        """Fetch vectors by key, optionally including data and metadata."""
        body = self.with_metadata(
            keys=keys, returnData=return_data, returnMetadata=return_metadata
        )
        data = self._request.send(http_method="POST", path=["GetVectors"], body=body)
        # Fix: parse JSON bytes. NOTE(review): async `get` returns the unwrapped
        # .vectors list; sync keeps its declared GetVectorsResponse — confirm
        # which shape is intended before regenerating with run-unasync.
        return GetVectorsResponse.model_validate_json(data.content)

    def list(
        self,
        max_results: Optional[int] = None,
        next_token: Optional[str] = None,
        return_data: bool = True,
        return_metadata: bool = True,
        segment_count: Optional[int] = None,
        segment_index: Optional[int] = None,
    ) -> ListVectorsResponse:
        """Page through the vectors in this index."""
        body = self.with_metadata(
            maxResults=max_results,
            nextToken=next_token,
            returnData=return_data,
            returnMetadata=return_metadata,
            segmentCount=segment_count,
            segmentIndex=segment_index,
        )
        data = self._request.send(http_method="POST", path=["ListVectors"], body=body)
        # Fix: parse JSON bytes.
        return ListVectorsResponse.model_validate_json(data.content)

    def query(
        self,
        query_vector: VectorData,
        topK: Optional[int] = None,
        filter: Optional[VectorFilter] = None,
        return_distance: bool = True,
        return_metadata: bool = True,
    ) -> QueryVectorsResponse:
        """Similarity-search the index with *query_vector*."""
        body = self.with_metadata(
            queryVector=dict(query_vector),
            topK=topK,
            filter=filter,
            returnDistance=return_distance,
            returnMetadata=return_metadata,
        )
        data = self._request.send(http_method="POST", path=["QueryVectors"], body=body)
        # Fix: parse JSON bytes.
        return QueryVectorsResponse.model_validate_json(data.content)

    def delete(self, keys: List[str]) -> None:
        """Delete up to 500 vectors by key.

        Raises VectorBucketException unless 1 <= len(keys) <= 500.
        Fix: the previous guard (`1 < len(keys) or len(keys) > 500`) rejected
        every batch of two or more keys and accepted an empty batch.
        """
        if not 1 <= len(keys) <= 500:
            raise VectorBucketException("Keys batch size must be between 1 and 500.")
        body = self.with_metadata(keys=keys)
        self._request.send(http_method="POST", path=["DeleteVectors"], body=body)


class SyncStorageVectorsClient:
    """Entry point for the vector buckets API (sync).

    NOTE(review): the async counterpart also exposes create_bucket /
    get_bucket / list_buckets / delete_bucket; this sync class lacks them —
    it likely needs regeneration via run-unasync.
    """

    def __init__(self, url: URL, headers: Headers, session: Client) -> None:
        self._request = RequestBuilder(session, base_url=URL(url), headers=headers)

    def from_(self, bucket_name: str) -> SyncVectorBucketScope:
        """Scope operations to the bucket named *bucket_name*."""
        return SyncVectorBucketScope(self._request, bucket_name)
-from typing import TypedDict +from typing import Optional, TypedDict, Union + +from pydantic import BaseModel from .utils import StorageException +class VectorBucketException(Exception): + def __init__(self, msg: str) -> None: + self.msg = msg + + +class VectorBucketErrorMessage(BaseModel): + statusCode: Union[str, int] + error: str + message: str + code: Optional[str] = None + + class StorageApiErrorDict(TypedDict): name: str message: str code: str - status: int + status: Union[int, str] class StorageApiError(StorageException): """Error raised when an operation on the storage API fails.""" - def __init__(self, message: str, code: str, status: int) -> None: + def __init__(self, message: str, code: str, status: Union[int, str]) -> None: error_message = ( f"{{'statusCode': {status}, 'error': {code}, 'message': {message}}}" ) diff --git a/src/storage/src/storage3/types.py b/src/storage/src/storage3/types.py index 43bf4bba..1bd1d24f 100644 --- a/src/storage/src/storage3/types.py +++ b/src/storage/src/storage3/types.py @@ -1,16 +1,23 @@ from __future__ import annotations +from collections.abc import Mapping, Sequence from dataclasses import asdict, dataclass from datetime import datetime -from typing import Any, Dict, Literal, Optional, TypedDict, Union +from typing import Any, Dict, List, Literal, Optional, TypedDict, Union -from pydantic import BaseModel, ConfigDict, TypeAdapter -from typing_extensions import ReadOnly +from pydantic import BaseModel, ConfigDict, Field, TypeAdapter +from typing_extensions import ReadOnly, TypeAlias, TypeAliasType RequestMethod = Literal["GET", "POST", "DELETE", "PUT", "HEAD"] config = ConfigDict(extra="ignore") +# https://docs.pydantic.dev/2.11/concepts/types/#named-recursive-types +JSON = TypeAliasType( + "JSON", "Union[None, bool, str, int, float, Sequence[JSON], Mapping[str, JSON]]" +) +JSONAdapter: TypeAdapter = TypeAdapter(JSON) + class BaseBucket(BaseModel): """Represents a file storage bucket.""" @@ -151,3 +158,118 @@ class 
CreateSignedUploadUrlOptions(BaseModel): }, total=False, ) + +DistanceMetric: TypeAlias = Literal["cosine", "euclidean"] + + +class MetadataConfiguration(BaseModel): + nonFilterableMetadataKeys: Optional[List[str]] + + +class ListIndexesOptions(BaseModel): + nextToken: Optional[str] = None + maxResults: Optional[int] = None + prefix: Optional[str] = None + + +class ListIndexesResponseItem(BaseModel): + indexName: str + + +class ListVectorIndexesResponse(BaseModel): + indexes: List[ListIndexesResponseItem] + nextToken: Optional[str] = None + + +class VectorIndex(BaseModel): + index_name: str = Field(alias="indexName") + bucket_name: str = Field(alias="vectorBucketName") + data_type: str = Field(alias="dataType") + dimension: int + distance_metric: DistanceMetric = Field(alias="distanceMetric") + metadata: Optional[MetadataConfiguration] = Field( + alias="metadataConfiguration", default=None + ) + creation_time: Optional[datetime] = None + + +class GetVectorIndexResponse(BaseModel): + index: VectorIndex + + +VectorFilter = Dict[str, Any] + + +class VectorData(BaseModel): + float32: List[float] + + +class VectorObject(BaseModel): + key: str + data: VectorData + metadata: Optional[dict[str, Union[str, bool, float]]] = None + + def as_json(self) -> JSON: + return {"key": self.key, "data": dict(self.data), "metadata": self.metadata} + + +class VectorMatch(BaseModel): + key: str + data: Optional[VectorData] = None + distance: Optional[int] = None + metadata: Optional[dict[str, Any]] = None + + +class GetVectorsResponse(BaseModel): + vectors: List[VectorMatch] + + +class ListVectorsResponse(BaseModel): + vectors: List[VectorMatch] + nextToken: Optional[str] = None + + +class QueryVectorsResponse(BaseModel): + matches: List[VectorMatch] + + +class AnalyticsBucket(BaseModel): + name: str + type: Literal["ANALYTICS"] + format: str + create_at: datetime + updated_at: datetime + + +SortColumn = Literal["id", "name", "created_at", "updated_at"] +SortOrder = Literal["asc", 
"desc"] + +AnalyticsBucketsParser = TypeAdapter(List[AnalyticsBucket]) + + +class AnalyticsBucketDeleteResponse(BaseModel): + message: str + + +class VectorBucketEncryptionConfiguration(BaseModel): + kmsKeyArn: Optional[str] = None + sseType: Optional[str] = None + + +class VectorBucket(BaseModel): + vectorBucketName: str + creationTime: Optional[datetime] = None + encryptionConfiguration: Optional[VectorBucketEncryptionConfiguration] = None + + +class GetVectorBucketResponse(BaseModel): + vectorBucket: VectorBucket + + +class ListVectorBucketsItem(BaseModel): + vectorBucketName: str + + +class ListVectorBucketsResponse(BaseModel): + vectorBuckets: List[ListVectorBucketsItem] + nextToken: Optional[str] = None