@@ -32,7 +32,7 @@
 from copy import deepcopy
 from dataclasses import is_dataclass
 from http.client import HTTPResponse
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import Any, AsyncGenerator, Dict, List, Optional, Tuple, Union
 
 import asyncio_atexit
 import httpx
@@ -6706,6 +6706,37 @@ async def rest_delete_async(
             **kwargs,
         )
 
+    async def rest_get_paginated_async(
+        self, uri: str, limit: int = 20, offset: int = 0
+    ) -> AsyncGenerator[Dict[str, Any], None]:
+        """
+        Get paginated results asynchronously
+
+        Arguments:
+            uri: A URI that returns paginated results
+            limit: How many records should be returned per request
+            offset: At what record offset from the first should iteration start
+
+        Returns:
+            A generator over paginated results
+
+        The limit parameter is set at 20 by default. Using a larger limit results in fewer calls to the service, but if
+        responses are large enough to be a burden on the service they may be truncated.
+        """
+        prev_num_results = sys.maxsize
+        while prev_num_results > 0:
+            params = {"offset": offset, "limit": limit}
+            page = await self.rest_get_async(
+                uri=uri,
+                params=params,
+            )
+            results = page["results"] if "results" in page else page["children"]
+            prev_num_results = len(results)
+
+            for result in results:
+                offset += 1
+                yield result
+
 
 async def async_request_hook_httpx(span: Span, request: httpx.Request) -> None:
     """Hook used to encapsulate a span for this library. The request hook is called
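For context, a minimal sketch of how the new generator could be consumed. It assumes a client instance (here called `client`) exposing the `rest_get_paginated_async` method added above; the endpoint path `/api/v1/items` and the helper name `list_all_items` are hypothetical placeholders, not part of this change.

```python
import asyncio


async def list_all_items(client) -> None:
    # Iterate over every record returned by a paginated endpoint.
    # The generator handles the offset/limit bookkeeping internally,
    # so the caller sees a flat stream of result dictionaries.
    async for record in client.rest_get_paginated_async(
        uri="/api/v1/items",  # hypothetical paginated endpoint
        limit=50,             # larger pages mean fewer round trips
    ):
        print(record)


# asyncio.run(list_all_items(client))  # 'client' must be constructed elsewhere
```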