Skip to content

Commit e5476ab

Browse files
authored
Merge pull request #557 from sanders41/pydantic2
Add support for Pydantic v2
2 parents 2304ba5 + 0f9625c commit e5476ab

File tree

11 files changed

+725
-376
lines changed

11 files changed

+725
-376
lines changed

meilisearch_python_async/_utils.py

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
from __future__ import annotations
2+
3+
from datetime import datetime
4+
from functools import lru_cache
5+
6+
import pydantic
7+
8+
9+
@lru_cache(maxsize=1)
def is_pydantic_2() -> bool:
    """Return True if the installed Pydantic version is 2 or greater.

    The result is cached (lru_cache) so the version check only runs once per process.
    """
    try:
        # __version__ was added with Pydantic 2 so we know if this errors the version is < 2.
        # Still check the version as a fail safe in case __version__ gets added to version 1.
        # Parse the full major component rather than the first character so a future
        # major version >= 10 is still detected correctly.
        if int(pydantic.__version__.split(".")[0]) >= 2:  # type: ignore[attr-defined]
            return True
        else:  # pragma: no cover
            # Raise an AttributeError to match the AttributeError on __version__ because in either
            # case we need to get to the same place.
            raise AttributeError
    except AttributeError:  # pragma: no cover
        return False
22+
23+
24+
def iso_to_date_time(iso_date: datetime | str | None) -> datetime | None:
    """Handle conversion of an iso string to a datetime.

    The microseconds from Meilisearch are sometimes too long for Python to convert so this
    strips off the extra digits to shorten it when that happens. Strings with no
    fractional-seconds component at all (e.g. "2021-05-11T03:12:22Z") are also handled.

    Args:
        iso_date: A datetime (returned unchanged), an iso formatted string, or None.

    Returns:
        The converted datetime, or None when iso_date is falsy.
    """
    if not iso_date:
        return None

    if isinstance(iso_date, datetime):
        return iso_date

    try:
        return datetime.strptime(iso_date, "%Y-%m-%dT%H:%M:%S.%fZ")
    except ValueError:
        split = iso_date.split(".")
        if len(split) < 2:
            # No fractional seconds present; the original code raised IndexError here.
            return datetime.strptime(iso_date, "%Y-%m-%dT%H:%M:%SZ")
        # %f accepts at most 6 digits, so drop anything beyond microsecond precision.
        reduce = len(split[1]) - 6
        reduced = f"{split[0]}.{split[1][:-reduce]}Z"
        return datetime.strptime(reduced, "%Y-%m-%dT%H:%M:%S.%fZ")

meilisearch_python_async/client.py

Lines changed: 25 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
from httpx import AsyncClient
1111

1212
from meilisearch_python_async._http_requests import HttpRequests
13+
from meilisearch_python_async._utils import is_pydantic_2
1314
from meilisearch_python_async.errors import InvalidRestriction, MeilisearchApiError
1415
from meilisearch_python_async.index import Index
1516
from meilisearch_python_async.models.client import (
@@ -388,7 +389,10 @@ async def create_key(self, key: KeyCreate) -> Key:
388389
"""
389390
# The json.loads(key.json()) is because Pydantic can't serialize a date in a Python dict,
390391
# but can when converting to a json string.
391-
response = await self._http_requests.post("keys", json.loads(key.json(by_alias=True)))
392+
if is_pydantic_2:
393+
response = await self._http_requests.post("keys", json.loads(key.model_dump_json(by_alias=True))) # type: ignore[attr-defined]
394+
else: # pragma: no cover
395+
response = await self._http_requests.post("keys", json.loads(key.json(by_alias=True))) # type: ignore[attr-defined]
392396

393397
return Key(**response.json())
394398

@@ -495,11 +499,18 @@ async def update_key(self, key: KeyUpdate) -> Key:
495499
"""
496500
# The json.loads(key.json()) is because Pydantic can't serialize a date in a Python dict,
497501
# but can when converting to a json string.
498-
payload = {
499-
k: v
500-
for k, v in json.loads(key.json(by_alias=True)).items()
501-
if v is not None and k != "key"
502-
}
502+
if is_pydantic_2:
503+
payload = { # type: ignore[attr-defined]
504+
k: v
505+
for k, v in json.loads(key.model_dump_json(by_alias=True)).items()
506+
if v is not None and k != "key"
507+
}
508+
else: # pragma: no cover
509+
payload = { # type: ignore[attr-defined]
510+
k: v
511+
for k, v in json.loads(key.json(by_alias=True)).items()
512+
if v is not None and k != "key"
513+
}
503514
response = await self._http_requests.patch(f"keys/{key.key}", payload)
504515

505516
return Key(**response.json())
@@ -532,9 +543,14 @@ async def multi_search(self, queries: list[SearchParams]) -> list[SearchResultsW
532543
>>> search_results = await client.search(queries)
533544
"""
534545
url = "multi-search"
535-
response = await self._http_requests.post(
536-
url, body={"queries": [x.dict(by_alias=True) for x in queries]}
537-
)
546+
if is_pydantic_2:
547+
response = await self._http_requests.post(
548+
url, body={"queries": [x.model_dump(by_alias=True) for x in queries]} # type: ignore[attr-defined]
549+
)
550+
else: # pragma: no cover
551+
response = await self._http_requests.post(
552+
url, body={"queries": [x.dict(by_alias=True) for x in queries]} # type: ignore[attr-defined]
553+
)
538554

539555
return [SearchResultsWithUID(**x) for x in response.json()["results"]]
540556

meilisearch_python_async/index.py

Lines changed: 24 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
from httpx import AsyncClient
1414

1515
from meilisearch_python_async._http_requests import HttpRequests
16+
from meilisearch_python_async._utils import is_pydantic_2, iso_to_date_time
1617
from meilisearch_python_async.errors import InvalidDocumentError, MeilisearchError
1718
from meilisearch_python_async.models.documents import DocumentsInfo
1819
from meilisearch_python_async.models.index import IndexStats
@@ -54,8 +55,8 @@ def __init__(
5455
"""
5556
self.uid = uid
5657
self.primary_key = primary_key
57-
self.created_at: datetime | None = _iso_to_date_time(created_at)
58-
self.updated_at: datetime | None = _iso_to_date_time(updated_at)
58+
self.created_at: datetime | None = iso_to_date_time(created_at)
59+
self.updated_at: datetime | None = iso_to_date_time(updated_at)
5960
self._base_url = "indexes/"
6061
self._base_url_with_uid = f"{self._base_url}{self.uid}"
6162
self._documents_url = f"{self._base_url_with_uid}/documents"
@@ -175,10 +176,10 @@ async def fetch_info(self) -> Index:
175176
self.primary_key = index_dict["primaryKey"]
176177
loop = get_running_loop()
177178
self.created_at = await loop.run_in_executor(
178-
None, partial(_iso_to_date_time, index_dict["createdAt"])
179+
None, partial(iso_to_date_time, index_dict["createdAt"])
179180
)
180181
self.updated_at = await loop.run_in_executor(
181-
None, partial(_iso_to_date_time, index_dict["updatedAt"])
182+
None, partial(iso_to_date_time, index_dict["updatedAt"])
182183
)
183184
return self
184185

@@ -1477,7 +1478,10 @@ async def update_settings(self, body: MeilisearchSettings) -> TaskInfo:
14771478
>>> index = client.index("movies")
14781479
>>> await index.update_settings(new_settings)
14791480
"""
1480-
body_dict = {k: v for k, v in body.dict(by_alias=True).items() if v is not None}
1481+
if is_pydantic_2:
1482+
body_dict = {k: v for k, v in body.model_dump(by_alias=True).items() if v is not None} # type: ignore[attr-defined]
1483+
else: # pragma: no cover
1484+
body_dict = {k: v for k, v in body.dict(by_alias=True).items() if v is not None} # type: ignore[attr-defined]
14811485

14821486
url = f"{self._settings_url}"
14831487
response = await self._http_requests.patch(url, body_dict)
@@ -2184,7 +2188,11 @@ async def update_typo_tolerance(self, typo_tolerance: TypoTolerance) -> TaskInfo
21842188
>>> await index.update_typo_tolerance()
21852189
"""
21862190
url = f"{self._settings_url}/typo-tolerance"
2187-
response = await self._http_requests.patch(url, typo_tolerance.dict(by_alias=True))
2191+
2192+
if is_pydantic_2:
2193+
response = await self._http_requests.patch(url, typo_tolerance.model_dump(by_alias=True)) # type: ignore[attr-defined]
2194+
else: # pragma: no cover
2195+
response = await self._http_requests.patch(url, typo_tolerance.dict(by_alias=True)) # type: ignore[attr-defined]
21882196

21892197
return TaskInfo(**response.json())
21902198

@@ -2256,7 +2264,11 @@ async def update_faceting(self, faceting: Faceting) -> TaskInfo:
22562264
>>> await index.update_faceting(faceting=Faceting(max_values_per_facet=100))
22572265
"""
22582266
url = f"{self._settings_url}/faceting"
2259-
response = await self._http_requests.patch(url, faceting.dict(by_alias=True))
2267+
2268+
if is_pydantic_2:
2269+
response = await self._http_requests.patch(url, faceting.model_dump(by_alias=True)) # type: ignore[attr-defined]
2270+
else: # pragma: no cover
2271+
response = await self._http_requests.patch(url, faceting.dict(by_alias=True)) # type: ignore[attr-defined]
22602272

22612273
return TaskInfo(**response.json())
22622274

@@ -2329,7 +2341,11 @@ async def update_pagination(self, settings: Pagination) -> TaskInfo:
23292341
>>> await index.update_pagination(settings=Pagination(max_total_hits=123))
23302342
"""
23312343
url = f"{self._settings_url}/pagination"
2332-
response = await self._http_requests.patch(url, settings.dict(by_alias=True))
2344+
2345+
if is_pydantic_2:
2346+
response = await self._http_requests.patch(url, settings.model_dump(by_alias=True)) # type: ignore[attr-defined]
2347+
else: # pragma: no cover
2348+
response = await self._http_requests.patch(url, settings.dict(by_alias=True)) # type: ignore[attr-defined]
23332349

23342350
return TaskInfo(**response.json())
23352351

@@ -2375,27 +2391,6 @@ def _combine_documents(documents: list[list[Any]]) -> list[Any]:
23752391
return [x for y in documents for x in y]
23762392

23772393

2378-
def _iso_to_date_time(iso_date: datetime | str | None) -> datetime | None:
2379-
"""Handle conversion of iso string to datetime.
2380-
2381-
The microseconds from Meilisearch are sometimes too long for python to convert so this
2382-
strips off the last digits to shorten it when that happens.
2383-
"""
2384-
if not iso_date:
2385-
return None
2386-
2387-
if isinstance(iso_date, datetime):
2388-
return iso_date
2389-
2390-
try:
2391-
return datetime.strptime(iso_date, "%Y-%m-%dT%H:%M:%S.%fZ")
2392-
except ValueError:
2393-
split = iso_date.split(".")
2394-
reduce = len(split[1]) - 6
2395-
reduced = f"{split[0]}.{split[1][:-reduce]}Z"
2396-
return datetime.strptime(reduced, "%Y-%m-%dT%H:%M:%S.%fZ")
2397-
2398-
23992394
async def _load_documents_from_file(
24002395
file_path: Path | str,
24012396
csv_delimiter: str | None = None,

meilisearch_python_async/models/client.py

Lines changed: 88 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
11
from datetime import datetime
2-
from typing import Dict, List, Optional
2+
from typing import Dict, List, Optional, Union
33

4+
import pydantic
45
from camel_converter.pydantic_base import CamelBase
56

7+
from meilisearch_python_async._utils import is_pydantic_2, iso_to_date_time
68
from meilisearch_python_async.models.index import IndexStats
79

810

@@ -11,6 +13,20 @@ class ClientStats(CamelBase):
1113
last_update: Optional[datetime] = None
1214
indexes: Optional[Dict[str, IndexStats]] = None
1315

16+
if is_pydantic_2:
17+
18+
@pydantic.field_validator("last_update", mode="before") # type: ignore[attr-defined]
19+
@classmethod
20+
def validate_last_update(cls, v: str) -> Union[datetime, None]:
21+
return iso_to_date_time(v)
22+
23+
else: # pragma: no cover
24+
25+
@pydantic.validator("last_update", pre=True)
26+
@classmethod
27+
def validate_last_update(cls, v: str) -> Union[datetime, None]:
28+
return iso_to_date_time(v)
29+
1430

1531
class _KeyBase(CamelBase):
1632
uid: str
@@ -20,17 +36,66 @@ class _KeyBase(CamelBase):
2036
indexes: List[str]
2137
expires_at: Optional[datetime] = None
2238

23-
class Config:
24-
json_encoders = {
25-
datetime: lambda v: None if not v else f"{str(v).split('.')[0].replace(' ', 'T')}Z"
26-
}
39+
if is_pydantic_2:
40+
model_config = pydantic.ConfigDict(ser_json_timedelta="iso8601") # type: ignore[typeddict-unknown-key]
41+
42+
@pydantic.field_validator("expires_at", mode="before") # type: ignore[attr-defined]
43+
@classmethod
44+
def validate_expires_at(cls, v: str) -> Union[datetime, None]:
45+
return iso_to_date_time(v)
46+
47+
else: # pragma: no cover
48+
49+
@pydantic.validator("expires_at", pre=True)
50+
@classmethod
51+
def validate_expires_at(cls, v: str) -> Union[datetime, None]:
52+
return iso_to_date_time(v)
53+
54+
class Config:
55+
json_encoders = {
56+
datetime: lambda v: None if not v else f"{str(v).split('.')[0].replace(' ', 'T')}Z"
57+
}
2758

2859

2960
class Key(_KeyBase):
    """Full representation of an API key as returned by Meilisearch."""

    key: str
    created_at: datetime
    updated_at: Optional[datetime] = None

    # NOTE: is_pydantic_2 is a function; `if is_pydantic_2:` tests the function object,
    # which is always truthy, so the Pydantic v1 branch could never run. Call it instead.
    if is_pydantic_2():

        @pydantic.field_validator("created_at", mode="before")  # type: ignore[attr-defined]
        @classmethod
        def validate_created_at(cls, v: str) -> datetime:
            # Meilisearch timestamps can exceed Python's microsecond precision, so
            # convert through iso_to_date_time instead of Pydantic's default parsing.
            converted = iso_to_date_time(v)

            if not converted:  # pragma: no cover
                raise ValueError("created_at is required")

            return converted

        @pydantic.field_validator("updated_at", mode="before")  # type: ignore[attr-defined]
        @classmethod
        def validate_updated_at(cls, v: str) -> Union[datetime, None]:
            return iso_to_date_time(v)

    else:  # pragma: no cover

        @pydantic.validator("created_at", pre=True)
        @classmethod
        def validate_created_at(cls, v: str) -> datetime:
            converted = iso_to_date_time(v)

            if not converted:
                raise ValueError("created_at is required")

            return converted

        @pydantic.validator("updated_at", pre=True)
        @classmethod
        def validate_updated_at(cls, v: str) -> Union[datetime, None]:
            return iso_to_date_time(v)
98+
3499

35100
class KeyCreate(CamelBase):
36101
name: Optional[str] = None
@@ -39,10 +104,15 @@ class KeyCreate(CamelBase):
39104
indexes: List[str]
40105
expires_at: Optional[datetime] = None
41106

42-
class Config:
43-
json_encoders = {
44-
datetime: lambda v: None if not v else f"{str(v).split('.')[0].replace(' ', 'T')}Z"
45-
}
107+
if is_pydantic_2:
108+
model_config = pydantic.ConfigDict(ser_json_timedelta="iso8601") # type: ignore[typeddict-unknown-key]
109+
110+
else: # pragma: no cover
111+
112+
class Config:
113+
json_encoders = {
114+
datetime: lambda v: None if not v else f"{str(v).split('.')[0].replace(' ', 'T')}Z"
115+
}
46116

47117

48118
class KeyUpdate(CamelBase):
@@ -53,10 +123,15 @@ class KeyUpdate(CamelBase):
53123
indexes: Optional[List[str]] = None
54124
expires_at: Optional[datetime] = None
55125

56-
class Config:
57-
json_encoders = {
58-
datetime: lambda v: None if not v else f"{str(v).split('.')[0].replace(' ', 'T')}Z"
59-
}
126+
if is_pydantic_2:
127+
model_config = pydantic.ConfigDict(ser_json_timedelta="iso8601") # type: ignore[typeddict-unknown-key]
128+
129+
else: # pragma: no cover
130+
131+
class Config:
132+
json_encoders = {
133+
datetime: lambda v: None if not v else f"{str(v).split('.')[0].replace(' ', 'T')}Z"
134+
}
60135

61136

62137
class KeySearch(CamelBase):

0 commit comments

Comments
 (0)