Skip to content

Commit e2a74cc

Browse files
authored
Merge pull request #108 from sanders41/docstrings
Formatting docstrings
2 parents c0aae75 + 6dc9ea7 commit e2a74cc

File tree

3 files changed

+25
-43
lines changed

3 files changed

+25
-43
lines changed

async_search_client/client.py

Lines changed: 9 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -33,9 +33,10 @@ async def __aexit__(
3333
await self.aclose()
3434

3535
async def aclose(self) -> None:
36+
"""Closes the client. This only needs to be used if the client was not created with a
37+
context manager
3638
"""
37-
Closes the client. This only needs to be used if the client was not created with a context manager.
38-
"""
39+
3940
await self._http_client.aclose()
4041

4142
async def create_dump(self) -> DumpInfo:
@@ -79,10 +80,10 @@ async def get_index(self, uid: str) -> Index:
7980
return await Index(self._http_client, uid).fetch_info()
8081

8182
def index(self, uid: str) -> Index:
82-
"""
83-
Create a local reference to an index identified by UID, without doing an HTTP call.
83+
"""Create a local reference to an index identified by UID, without doing an HTTP call.
8484
Because no network call is made this method is not awaitable.
8585
"""
86+
8687
return Index(self._http_client, uid=uid)
8788

8889
async def get_all_stats(self) -> ClientStats:
@@ -124,16 +125,14 @@ async def get_raw_indexes(self) -> Optional[list[IndexInfo]]:
124125
return [IndexInfo(**x) for x in response.json()]
125126

126127
async def get_version(self) -> Version:
127-
"""
128-
Get version MeiliSearch that is running
129-
"""
128+
"""Get version MeiliSearch that is running"""
129+
130130
response = await self._http_requests.get(build_url(Paths.VERSION))
131131
return Version(**response.json())
132132

133133
async def health(self) -> Health:
134-
"""
135-
Get health of the MeiliSearch server
136-
"""
134+
"""Get health of the MeiliSearch server"""
135+
137136
response = await self._http_requests.get(build_url(Paths.HEALTH))
138137
return Health(**response.json())
139138

async_search_client/index.py

Lines changed: 13 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -58,9 +58,8 @@ async def delete(self) -> int:
5858
return response.status_code
5959

6060
async def delete_if_exists(self) -> bool:
61-
"""
62-
Deletes the index if it already exists
63-
"""
61+
"""Deletes the index if it already exists"""
62+
6463
try:
6564
await self.delete()
6665
return True
@@ -70,9 +69,8 @@ async def delete_if_exists(self) -> bool:
7069
return False
7170

7271
async def update(self, primary_key: str = None) -> Index:
73-
"""
74-
Update the index primary-key.
75-
"""
72+
"""Update the index primary-key"""
73+
7674
payload = {}
7775
if primary_key is not None:
7876
payload["primaryKey"] = primary_key
@@ -157,13 +155,13 @@ async def get_update_status(self, update_id: int) -> UpdateStatus:
157155
async def wait_for_pending_update(
158156
self, update_id: int, timeout_in_ms: int = 5000, interval_in_ms: int = 50
159157
) -> UpdateStatus:
160-
"""
161-
Wait until MeiliSearch processes an update, and get its status.
158+
"""Wait until MeiliSearch processes an update, and get its status.
162159
163160
update_id: identifier of the update to retrieve
164161
timeout_in_ms (optional): time the method should wait before raising a MeiliSearchTimeoutError
165162
interval_in_ms (optional): time interval the method should wait (sleep) between requests
166163
"""
164+
167165
start_time = datetime.now()
168166
elapsed_time = 0.0
169167
while elapsed_time < timeout_in_ms:
@@ -280,9 +278,7 @@ async def add_documents(
280278
async def add_documents_in_batches(
281279
self, documents: list[dict], batch_size: int = 1000, primary_key: Optional[str] = None
282280
) -> list[UpdateId]:
283-
"""
284-
Splits documents into batches to reduce RAM usage with indexing.
285-
"""
281+
"""Splits documents into batches to reduce RAM usage with indexing"""
286282

287283
update_ids: list[UpdateId] = []
288284

@@ -295,9 +291,7 @@ async def add_documents_in_batches(
295291
async def add_documents_from_file(
296292
self, file_path: Path | str, primary_key: Optional[str] = None
297293
) -> UpdateId:
298-
"""
299-
Add documents to the index from a json file.
300-
"""
294+
"""Add documents to the index from a json file"""
301295

302296
if isinstance(file_path, str):
303297
file_path = Path(file_path)
@@ -314,9 +308,7 @@ async def add_documents_from_file(
314308
async def add_documents_from_file_in_batches(
315309
self, file_path: Path | str, batch_size: int = 1000, primary_key: Optional[str] = None
316310
) -> list[UpdateId]:
317-
"""
318-
Add documents to the index from a json file in batches to reduce RAM usage.
319-
"""
311+
"""Add documents to the index from a json file in batches to reduce RAM usage"""
320312

321313
if isinstance(file_path, str):
322314
file_path = Path(file_path)
@@ -349,9 +341,7 @@ async def update_documents(
349341
async def update_documents_in_batches(
350342
self, documents: list[dict], batch_size: int = 1000, primary_key: Optional[str] = None
351343
) -> list[UpdateId]:
352-
"""
353-
Splits documents into batches to reduce RAM usage with indexing.
354-
"""
344+
"""Splits documents into batches to reduce RAM usage with indexing"""
355345

356346
update_ids: list[UpdateId] = []
357347

@@ -364,9 +354,7 @@ async def update_documents_in_batches(
364354
async def update_documents_from_file(
365355
self, file_path: Path | str, primary_key: Optional[str] = None
366356
) -> UpdateId:
367-
"""
368-
Update documents in the index from a json file.
369-
"""
357+
"""Update documents in the index from a json file"""
370358

371359
if isinstance(file_path, str):
372360
file_path = Path(file_path)
@@ -383,9 +371,7 @@ async def update_documents_from_file(
383371
async def update_documents_from_file_in_batches(
384372
self, file_path: Path | str, batch_size: int = 1000, primary_key: Optional[str] = None
385373
) -> list[UpdateId]:
386-
"""
387-
Update documents in the index from a json file in batches to reduce RAM usage.
388-
"""
374+
"""Update documents in the index from a json file in batches to reduce RAM usage"""
389375

390376
if isinstance(file_path, str):
391377
file_path = Path(file_path)
@@ -623,8 +609,7 @@ async def reset_attributes_for_faceting(self) -> UpdateId:
623609

624610
@staticmethod
625611
def _iso_to_date_time(iso_date: Optional[datetime | str]) -> Optional[datetime]:
626-
"""
627-
The microseconds from MeiliSearch are sometimes too long for python to convert so this
612+
"""The microseconds from MeiliSearch are sometimes too long for python to convert so this
628613
strips off the last digits to shorten it when that happens.
629614
"""
630615

tests/conftest.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -39,10 +39,10 @@ async def test_client():
3939
@pytest.mark.asyncio
4040
@pytest.fixture(autouse=True)
4141
async def clear_indexes(test_client):
42-
"""
43-
Auto-clears the indexes after each test function run.
42+
"""Auto-clears the indexes after each test function run.
4443
Makes all the test functions independent.
4544
"""
45+
4646
yield
4747
indexes = await test_client.get_indexes()
4848
if indexes:
@@ -92,9 +92,7 @@ async def indexes_sample(test_client):
9292

9393
@pytest.fixture(scope="session")
9494
def small_movies():
95-
"""
96-
Runs once per session. Provides the content of small_movies.json.
97-
"""
95+
"""Runs once per session. Provides the content of small_movies.json"""
9896

9997
with open(SMALL_MOVIES_PATH, "r") as movie_file:
10098
yield json.loads(movie_file.read())

0 commit comments

Comments (0)