diff --git a/docs/02_concepts/09_streaming.mdx b/docs/02_concepts/09_streaming.mdx
index b365e34a..c62da01e 100644
--- a/docs/02_concepts/09_streaming.mdx
+++ b/docs/02_concepts/09_streaming.mdx
@@ -18,7 +18,7 @@ Supported streaming methods:
 - [`KeyValueStoreClient.stream_record`](/reference/class/KeyValueStoreClient#stream_record) - Stream key-value store records as raw data.
 - [`LogClient.stream`](/reference/class/LogClient#stream) - Stream logs in real time.
 
-These methods return a raw, context-managed `httpx.Response` object. The response must be consumed within a with block to ensure that the connection is closed automatically, preventing memory leaks or unclosed connections.
+These methods return a raw, context-managed `impit.Response` object. The response must be consumed within a `with` block to ensure that the connection is closed automatically, preventing memory leaks or unclosed connections.
 
 The following example demonstrates how to stream the logs of an Actor run incrementally:
 
diff --git a/docs/02_concepts/code/01_async_support.py b/docs/02_concepts/code/01_async_support.py
index 28186519..22cc390e 100644
--- a/docs/02_concepts/code/01_async_support.py
+++ b/docs/02_concepts/code/01_async_support.py
@@ -17,8 +17,8 @@ async def main() -> None:
     # Stream the logs
     async with log_client.stream() as async_log_stream:
         if async_log_stream:
-            async for line in async_log_stream.aiter_lines():
-                print(line)
+            async for bytes_chunk in async_log_stream.aiter_bytes():
+                print(bytes_chunk)
 
 
 if __name__ == '__main__':
diff --git a/docs/02_concepts/code/09_streaming_async.py b/docs/02_concepts/code/09_streaming_async.py
index 6ff097a8..5459784e 100644
--- a/docs/02_concepts/code/09_streaming_async.py
+++ b/docs/02_concepts/code/09_streaming_async.py
@@ -10,5 +10,5 @@ async def main() -> None:
     async with log_client.stream() as log_stream:
         if log_stream:
-            for line in log_stream.iter_lines():
-                print(line)
+            async for bytes_chunk in log_stream.aiter_bytes():
+                print(bytes_chunk)
diff --git a/docs/02_concepts/code/09_streaming_sync.py b/docs/02_concepts/code/09_streaming_sync.py
index 4eb0093d..e7617ab3 100644
--- a/docs/02_concepts/code/09_streaming_sync.py
+++ b/docs/02_concepts/code/09_streaming_sync.py
@@ -10,5 +10,5 @@ def main() -> None:
     with log_client.stream() as log_stream:
         if log_stream:
-            for line in log_stream.iter_lines():
-                print(line)
+            for bytes_chunk in log_stream.iter_bytes():
+                print(bytes_chunk)
diff --git a/docs/04_upgrading/upgrading_to_v2.md b/docs/04_upgrading/upgrading_to_v2.md
new file mode 100644
index 00000000..9de34225
--- /dev/null
+++ b/docs/04_upgrading/upgrading_to_v2.md
@@ -0,0 +1,18 @@
+---
+id: upgrading-to-v2
+title: Upgrading to v2
+---
+
+This page summarizes the breaking changes between Apify Python API client v1.x and v2.0.
+
+## Python version support
+
+
+
+## Change underlying HTTP library
+
+In v2.0, the Apify Python API client switched from using `httpx` to [`impit`](https://github.com/apify/impit) as the underlying HTTP library.
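+
+The most visible consequence is in streaming: the raw response objects returned by streaming methods are now `impit.Response` instances, and the line-based iterators (`iter_lines()`/`aiter_lines()`) used in the v1.x examples are replaced by byte-based iteration (`iter_bytes()`/`aiter_bytes()`). A minimal before/after sketch, mirroring the updated streaming docs in this repository (the token and run ID are placeholders):
+
+```python
+from apify_client import ApifyClient
+
+apify_client = ApifyClient('MY-APIFY-TOKEN')
+log_client = apify_client.log('MY-RUN-ID')
+
+with log_client.stream() as log_stream:
+    if log_stream:
+        # v1.x: for line in log_stream.iter_lines(): print(line)
+        # v2.0: consume the stream as raw byte chunks instead.
+        for bytes_chunk in log_stream.iter_bytes():
+            print(bytes_chunk)
+```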
+
+## Update signature of methods
+
+
diff --git a/pyproject.toml b/pyproject.toml
index ff07ea26..634e43b5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,7 +27,7 @@ keywords = ["apify", "api", "client", "automation", "crawling", "scraping"]
 dependencies = [
     "apify-shared<2.0.0",
     "colorama>=0.4.0",
-    "httpx>=0.25",
+    "impit>=0.5.2",
     "more_itertools>=10.0.0",
 ]
 
@@ -54,7 +54,6 @@ dev = [
     "pytest~=8.4.0",
     "pytest-httpserver>=1.1.3",
     "redbaron~=0.9.0",
-    "respx~=0.22.0",
     "ruff~=0.12.0",
     "setuptools", # setuptools are used by pytest but not explicitly required
     "types-colorama~=0.4.15.20240106",
@@ -175,7 +174,7 @@ warn_unused_ignores = true
 exclude = []
 
 [[tool.mypy.overrides]]
-module = ["pandas", "respx"]
+module = ["pandas"]
 ignore_missing_imports = true
 
 [tool.basedpyright]
diff --git a/src/apify_client/_errors.py b/src/apify_client/_errors.py
index 87a91f2b..6b3d8db2 100644
--- a/src/apify_client/_errors.py
+++ b/src/apify_client/_errors.py
@@ -1,6 +1,8 @@
 from __future__ import annotations
 
-import httpx
+import json as jsonlib
+
+import impit
 
 from apify_shared.utils import ignore_docs
 
@@ -17,12 +19,13 @@ class ApifyApiError(ApifyClientError):
     """
 
     @ignore_docs
-    def __init__(self, response: httpx.Response, attempt: int) -> None:
+    def __init__(self, response: impit.Response, attempt: int, method: str = 'GET') -> None:
        """Initialize a new instance.
 
         Args:
             response: The response to the failed API call.
             attempt: Which attempt was the request that failed.
+            method: The HTTP method used for the request.
         """
         self.message: str | None = None
         self.type: str | None = None
@@ -30,7 +33,7 @@ def __init__(self, response: httpx.Response, attempt: int) -> None:
         self.message = f'Unexpected error: {response.text}'
 
         try:
-            response_data = response.json()
+            response_data = jsonlib.loads(response.text)
             if 'error' in response_data:
                 self.message = response_data['error']['message']
                 self.type = response_data['error']['type']
@@ -44,7 +47,7 @@ def __init__(self, response: httpx.Response, attempt: int) -> None:
         self.name = 'ApifyApiError'
         self.status_code = response.status_code
         self.attempt = attempt
-        self.http_method = response.request.method
+        self.http_method = method
 
         # TODO: self.client_method  # noqa: TD003
         # TODO: self.original_stack  # noqa: TD003
@@ -61,7 +64,7 @@ class InvalidResponseBodyError(ApifyClientError):
     """
 
     @ignore_docs
-    def __init__(self, response: httpx.Response) -> None:
+    def __init__(self, response: impit.Response) -> None:
         """Initialize a new instance.
 
         Args:
@@ -80,8 +83,8 @@ def is_retryable_error(exc: Exception) -> bool:
         exc,
         (
             InvalidResponseBodyError,
-            httpx.NetworkError,
-            httpx.TimeoutException,
-            httpx.RemoteProtocolError,
+            impit.NetworkError,
+            impit.TimeoutException,
+            impit.RemoteProtocolError,
         ),
     )
diff --git a/src/apify_client/_http_client.py b/src/apify_client/_http_client.py
index 74d98553..d6e2f1f6 100644
--- a/src/apify_client/_http_client.py
+++ b/src/apify_client/_http_client.py
@@ -8,8 +8,9 @@
 from http import HTTPStatus
 from importlib import metadata
 from typing import TYPE_CHECKING, Any
+from urllib.parse import urlencode
 
-import httpx
+import impit
 from apify_shared.utils import ignore_docs, is_content_type_json, is_content_type_text, is_content_type_xml
 
 from apify_client._errors import ApifyApiError, InvalidResponseBodyError, is_retryable_error
@@ -59,13 +60,13 @@ def __init__(
         if token is not None:
             headers['Authorization'] = f'Bearer {token}'
 
-        self.httpx_client = httpx.Client(headers=headers, follow_redirects=True, timeout=timeout_secs)
-        self.httpx_async_client = httpx.AsyncClient(headers=headers, follow_redirects=True, timeout=timeout_secs)
+        self.impit_client = impit.Client(headers=headers, follow_redirects=True, timeout=timeout_secs)
+        self.impit_async_client = impit.AsyncClient(headers=headers, follow_redirects=True, timeout=timeout_secs)
 
         self.stats = stats or Statistics()
 
     @staticmethod
-    def _maybe_parse_response(response: httpx.Response) -> Any:
+    def _maybe_parse_response(response: impit.Response) -> Any:
         if response.status_code == HTTPStatus.NO_CONTENT:
             return None
 
@@ -75,7 +76,7 @@ def _maybe_parse_response(response: impit.Response) -> Any:
         try:
             if is_content_type_json(content_type):
-                return response.json()
+                return jsonlib.loads(response.text)
             elif is_content_type_xml(content_type) or is_content_type_text(content_type):  # noqa: RET505
                 return response.text
             else:
@@ -131,6 +132,21 @@ def _prepare_request_call(
             data,
         )
 
+    def _build_url_with_params(self, url: str, params: dict | None = None) -> str:
+        if not params:
+            return url
+
+        param_pairs: list[tuple[str, str]] = []
+        for key, value in params.items():
+            if isinstance(value, list):
+                param_pairs.extend((key, str(v)) for v in value)
+            else:
+                param_pairs.append((key, str(value)))
+
+        query_string = urlencode(param_pairs)
+
+        return f'{url}?{query_string}'
+
 
 class HTTPClient(_BaseHTTPClient):
     def call(
         self,
@@ -145,7 +161,7 @@ def call(
         stream: bool | None = None,
         parse_response: bool | None = True,
         timeout_secs: int | None = None,
-    ) -> httpx.Response:
+    ) -> impit.Response:
         log_context.method.set(method)
         log_context.url.set(url)
@@ -156,34 +172,26 @@ def call(
 
         headers, params, content = self._prepare_request_call(headers, params, data, json)
 
-        httpx_client = self.httpx_client
+        impit_client = self.impit_client
 
-        def _make_request(stop_retrying: Callable, attempt: int) -> httpx.Response:
+        def _make_request(stop_retrying: Callable, attempt: int) -> impit.Response:
             log_context.attempt.set(attempt)
             logger.debug('Sending request')
 
             self.stats.requests += 1
 
             try:
-                request = httpx_client.build_request(
+                # Increase timeout with each attempt. Max timeout is bounded by the client timeout.
+                timeout = min(self.timeout_secs, (timeout_secs or self.timeout_secs) * 2 ** (attempt - 1))
+
+                url_with_params = self._build_url_with_params(url, params)
+
+                response = impit_client.request(
                     method=method,
-                    url=url,
+                    url=url_with_params,
                     headers=headers,
-                    params=params,
                     content=content,
-                )
-
-                # Increase timeout with each attempt. Max timeout is bounded by the client timeout.
-                timeout = min(self.timeout_secs, (timeout_secs or self.timeout_secs) * 2 ** (attempt - 1))
-                request.extensions['timeout'] = {
-                    'connect': timeout,
-                    'pool': timeout,
-                    'read': timeout,
-                    'write': timeout,
-                }
-
-                response = httpx_client.send(
-                    request=request,
+                    timeout=timeout,
                     stream=stream or False,
                 )
 
@@ -217,7 +225,7 @@ def _make_request(stop_retrying: Callable, attempt: int) -> impit.Response:
             # Read the response in case it is a stream, so we can raise the error properly
             response.read()
 
-            raise ApifyApiError(response, attempt)
+            raise ApifyApiError(response, attempt, method=method)
 
         return retry_with_exp_backoff(
             _make_request,
@@ -241,7 +249,7 @@ async def call(
         stream: bool | None = None,
         parse_response: bool | None = True,
         timeout_secs: int | None = None,
-    ) -> httpx.Response:
+    ) -> impit.Response:
         log_context.method.set(method)
         log_context.url.set(url)
@@ -252,31 +260,23 @@ async def call(
 
         headers, params, content = self._prepare_request_call(headers, params, data, json)
 
-        httpx_async_client = self.httpx_async_client
+        impit_async_client = self.impit_async_client
 
-        async def _make_request(stop_retrying: Callable, attempt: int) -> httpx.Response:
+        async def _make_request(stop_retrying: Callable, attempt: int) -> impit.Response:
             log_context.attempt.set(attempt)
             logger.debug('Sending request')
             try:
-                request = httpx_async_client.build_request(
+                # Increase timeout with each attempt. Max timeout is bounded by the client timeout.
+                timeout = min(self.timeout_secs, (timeout_secs or self.timeout_secs) * 2 ** (attempt - 1))
+
+                url_with_params = self._build_url_with_params(url, params)
+
+                response = await impit_async_client.request(
                     method=method,
-                    url=url,
+                    url=url_with_params,
                     headers=headers,
-                    params=params,
                     content=content,
-                )
-
-                # Increase timeout with each attempt. Max timeout is bounded by the client timeout.
-                timeout = min(self.timeout_secs, (timeout_secs or self.timeout_secs) * 2 ** (attempt - 1))
-                request.extensions['timeout'] = {
-                    'connect': timeout,
-                    'pool': timeout,
-                    'read': timeout,
-                    'write': timeout,
-                }
-
-                response = await httpx_async_client.send(
-                    request=request,
+                    timeout=timeout,
                     stream=stream or False,
                 )
 
@@ -310,7 +310,7 @@ async def _make_request(stop_retrying: Callable, attempt: int) -> impit.Response:
             # Read the response in case it is a stream, so we can raise the error properly
             await response.aread()
 
-            raise ApifyApiError(response, attempt)
+            raise ApifyApiError(response, attempt, method=method)
 
         return await retry_with_exp_backoff_async(
             _make_request,
diff --git a/src/apify_client/clients/base/actor_job_base_client.py b/src/apify_client/clients/base/actor_job_base_client.py
index 2b63c834..13c23c40 100644
--- a/src/apify_client/clients/base/actor_job_base_client.py
+++ b/src/apify_client/clients/base/actor_job_base_client.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import asyncio
+import json as jsonlib
 import math
 import time
 from datetime import datetime, timezone
@@ -39,7 +40,7 @@ def _wait_for_finish(self, wait_secs: int | None = None) -> dict | None:
                     method='GET',
                     params=self._params(waitForFinish=wait_for_finish),
                 )
-                job = parse_date_fields(pluck_data(response.json()))
+                job = parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
                 seconds_elapsed = math.floor((datetime.now(timezone.utc) - started_at).total_seconds())
                 if ActorJobStatus(job['status']).is_terminal or (
@@ -70,7 +71,7 @@ def _abort(self, *, gracefully: bool | None = None) -> dict:
             method='POST',
             params=self._params(gracefully=gracefully),
         )
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
 
 @ignore_docs
@@ -94,7 +95,7 @@ async def _wait_for_finish(self, wait_secs: int | None = None) -> dict | None:
                     method='GET',
                     params=self._params(waitForFinish=wait_for_finish),
                 )
-                job = parse_date_fields(pluck_data(response.json()))
+                job = parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
                 seconds_elapsed = math.floor((datetime.now(timezone.utc) - started_at).total_seconds())
                 if ActorJobStatus(job['status']).is_terminal or (
@@ -125,4 +126,4 @@ async def _abort(self, *, gracefully: bool | None = None) -> dict:
             method='POST',
             params=self._params(gracefully=gracefully),
         )
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
diff --git a/src/apify_client/clients/base/resource_client.py b/src/apify_client/clients/base/resource_client.py
index 37e2d3b0..8a8ef279 100644
--- a/src/apify_client/clients/base/resource_client.py
+++ b/src/apify_client/clients/base/resource_client.py
@@ -1,5 +1,7 @@
 from __future__ import annotations
 
+import json as jsonlib
+
 from apify_shared.utils import ignore_docs, parse_date_fields
 
 from apify_client._errors import ApifyApiError
@@ -20,7 +22,7 @@ def _get(self, timeout_secs: int | None = None) -> dict | None:
                 timeout_secs=timeout_secs,
             )
 
-            return parse_date_fields(pluck_data(response.json()))
+            return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
@@ -36,7 +38,7 @@ def _update(self, updated_fields: dict, timeout_secs: int | None = None) -> dict:
             timeout_secs=timeout_secs,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def _delete(self, timeout_secs: int | None = None) -> None:
         try:
@@ -64,7 +66,7 @@ async def _get(self, timeout_secs: int | None = None) -> dict | None:
                 timeout_secs=timeout_secs,
             )
 
-            return parse_date_fields(pluck_data(response.json()))
+            return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
@@ -80,7 +82,7 @@ async def _update(self, updated_fields: dict, timeout_secs: int | None = None) -
             timeout_secs=timeout_secs,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def _delete(self, timeout_secs: int | None = None) -> None:
         try:
diff --git a/src/apify_client/clients/base/resource_collection_client.py b/src/apify_client/clients/base/resource_collection_client.py
index e4984fa9..f007c596 100644
--- a/src/apify_client/clients/base/resource_collection_client.py
+++ b/src/apify_client/clients/base/resource_collection_client.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import json as jsonlib
 from typing import Any, Generic, TypeVar
 
 from apify_shared.utils import ignore_docs, parse_date_fields
@@ -53,7 +54,7 @@ def _list(self, **kwargs: Any) -> ListPage:
             params=self._params(**kwargs),
         )
 
-        return ListPage(parse_date_fields(pluck_data(response.json())))
+        return ListPage(parse_date_fields(pluck_data(jsonlib.loads(response.text))))
 
     def _create(self, resource: dict) -> dict:
         response = self.http_client.call(
@@ -63,7 +64,7 @@ def _create(self, resource: dict) -> dict:
             json=resource,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def _get_or_create(self, name: str | None = None, resource: dict | None = None) -> dict:
         response = self.http_client.call(
@@ -73,7 +74,7 @@ def _get_or_create(self, name: str | None = None, resource: dict | None = None)
             json=resource,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
 
 @ignore_docs
@@ -87,7 +88,7 @@ async def _list(self, **kwargs: Any) -> ListPage:
             params=self._params(**kwargs),
         )
 
-        return ListPage(parse_date_fields(pluck_data(response.json())))
+        return ListPage(parse_date_fields(pluck_data(jsonlib.loads(response.text))))
 
     async def _create(self, resource: dict) -> dict:
         response = await self.http_client.call(
@@ -97,7 +98,7 @@ async def _create(self, resource: dict) -> dict:
             json=resource,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def _get_or_create(
         self,
@@ -111,4 +112,4 @@ async def _get_or_create(
             json=resource,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
diff --git a/src/apify_client/clients/resource_clients/actor.py b/src/apify_client/clients/resource_clients/actor.py
index a2473699..9b64795b 100644
--- a/src/apify_client/clients/resource_clients/actor.py
+++ b/src/apify_client/clients/resource_clients/actor.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import json as jsonlib
 from typing import TYPE_CHECKING, Any, Literal
 
 from apify_shared.utils import (
@@ -276,7 +277,7 @@ def start(
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def call(
         self,
@@ -386,7 +387,7 @@ def build(
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def builds(self) -> BuildCollectionClient:
         """Retrieve a client for the builds of this Actor."""
@@ -417,7 +418,7 @@ async def default_build(
         )
         response = self.http_client.call(url=self._url('builds/default'), method='GET', params=request_params)
-        data = pluck_data(response.json())
+        data = pluck_data(jsonlib.loads(response.text))
 
         return BuildClient(
             base_url=self.base_url,
@@ -681,7 +682,7 @@ async def start(
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def call(
         self,
@@ -795,7 +796,7 @@ async def build(
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def builds(self) -> BuildCollectionClientAsync:
         """Retrieve a client for the builds of this Actor."""
@@ -830,7 +831,7 @@ async def default_build(
             method='GET',
             params=request_params,
         )
-        data = pluck_data(response.json())
+        data = pluck_data(jsonlib.loads(response.text))
 
         return BuildClientAsync(
             base_url=self.base_url,
diff --git a/src/apify_client/clients/resource_clients/build.py b/src/apify_client/clients/resource_clients/build.py
index bbc91f59..b427fcfa 100644
--- a/src/apify_client/clients/resource_clients/build.py
+++ b/src/apify_client/clients/resource_clients/build.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import json as jsonlib
 from typing import Any
 
 from apify_shared.utils import ignore_docs
@@ -56,7 +57,7 @@ def get_open_api_definition(self) -> dict | None:
             method='GET',
         )
 
-        response_data: dict = response.json()
+        response_data: dict = jsonlib.loads(response.text)
         return response_data
 
@@ -133,7 +134,7 @@ async def get_open_api_definition(self) -> dict | None:
             method='GET',
         )
 
-        response_data: dict = response.json()
+        response_data: dict = jsonlib.loads(response.text)
         return response_data
 
diff --git a/src/apify_client/clients/resource_clients/dataset.py b/src/apify_client/clients/resource_clients/dataset.py
index 368eef0b..19e98bda 100644
--- a/src/apify_client/clients/resource_clients/dataset.py
+++ b/src/apify_client/clients/resource_clients/dataset.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import json as jsonlib
 import warnings
 from contextlib import asynccontextmanager, contextmanager
 from typing import TYPE_CHECKING, Any
@@ -14,7 +15,7 @@
 if TYPE_CHECKING:
     from collections.abc import AsyncIterator, Iterator
 
-    import httpx
+    import impit
 
     from apify_shared.consts import StorageGeneralAccess
     from apify_shared.types import JSONSerializable
@@ -137,7 +138,7 @@ def list_items(
             params=request_params,
         )
 
-        data = response.json()
+        data = jsonlib.loads(response.text)
 
         return ListPage(
             {
@@ -446,7 +447,7 @@ def stream_items(
         skip_hidden: bool | None = None,
         xml_root: str | None = None,
         xml_row: str | None = None,
-    ) -> Iterator[httpx.Response]:
+    ) -> Iterator[impit.Response]:
         """Retrieve the items in the dataset as a stream.
 
         https://docs.apify.com/api/v2#/reference/datasets/item-collection/get-items
@@ -565,7 +566,7 @@ def get_statistics(self) -> dict | None:
                 params=self._params(),
                 timeout_secs=_SMALL_TIMEOUT,
             )
-            return pluck_data(response.json())
+            return pluck_data(jsonlib.loads(response.text))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
@@ -687,7 +688,7 @@ async def list_items(
             params=request_params,
         )
 
-        data = response.json()
+        data = jsonlib.loads(response.text)
 
         return ListPage(
             {
@@ -903,7 +904,7 @@ async def stream_items(
         skip_hidden: bool | None = None,
         xml_root: str | None = None,
         xml_row: str | None = None,
-    ) -> AsyncIterator[httpx.Response]:
+    ) -> AsyncIterator[impit.Response]:
         """Retrieve the items in the dataset as a stream.
 
         https://docs.apify.com/api/v2#/reference/datasets/item-collection/get-items
@@ -1022,7 +1023,7 @@ async def get_statistics(self) -> dict | None:
                 params=self._params(),
                 timeout_secs=_SMALL_TIMEOUT,
             )
-            return pluck_data(response.json())
+            return pluck_data(jsonlib.loads(response.text))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
diff --git a/src/apify_client/clients/resource_clients/key_value_store.py b/src/apify_client/clients/resource_clients/key_value_store.py
index 7100d475..ccc254f8 100644
--- a/src/apify_client/clients/resource_clients/key_value_store.py
+++ b/src/apify_client/clients/resource_clients/key_value_store.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import json as jsonlib
 import warnings
 from contextlib import asynccontextmanager, contextmanager
 from http import HTTPStatus
@@ -99,7 +100,7 @@ def list_keys(
             timeout_secs=_MEDIUM_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def get_record(self, key: str, *, as_bytes: bool = False, as_file: bool = False) -> dict | None:
         """Retrieve the given record from the key-value store.
@@ -367,7 +368,7 @@ async def list_keys(
             timeout_secs=_MEDIUM_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def get_record(self, key: str) -> dict | None:
         """Retrieve the given record from the key-value store.
diff --git a/src/apify_client/clients/resource_clients/log.py b/src/apify_client/clients/resource_clients/log.py
index bca8b07a..e007f667 100644
--- a/src/apify_client/clients/resource_clients/log.py
+++ b/src/apify_client/clients/resource_clients/log.py
@@ -21,7 +21,7 @@
     from collections.abc import AsyncIterator, Iterator
     from types import TracebackType
 
-    import httpx
+    import impit
     from typing_extensions import Self
 
     from apify_client.clients import RunClient, RunClientAsync
@@ -87,7 +87,7 @@ def get_as_bytes(self, *, raw: bool = False) -> bytes | None:
         return None
 
     @contextmanager
-    def stream(self, *, raw: bool = False) -> Iterator[httpx.Response | None]:
+    def stream(self, *, raw: bool = False) -> Iterator[impit.Response | None]:
         """Retrieve the log as a stream.
 
         https://docs.apify.com/api/v2#/reference/logs/log/get-log
@@ -177,7 +177,7 @@ async def get_as_bytes(self, *, raw: bool = False) -> bytes | None:
         return None
 
     @asynccontextmanager
-    async def stream(self, *, raw: bool = False) -> AsyncIterator[httpx.Response | None]:
+    async def stream(self, *, raw: bool = False) -> AsyncIterator[impit.Response | None]:
         """Retrieve the log as a stream.
 
         https://docs.apify.com/api/v2#/reference/logs/log/get-log
diff --git a/src/apify_client/clients/resource_clients/request_queue.py b/src/apify_client/clients/resource_clients/request_queue.py
index 7470c84f..d6716b33 100644
--- a/src/apify_client/clients/resource_clients/request_queue.py
+++ b/src/apify_client/clients/resource_clients/request_queue.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import asyncio
+import json as jsonlib
 import logging
 import math
 from collections.abc import Iterable
@@ -116,7 +117,7 @@ def list_head(self, *, limit: int | None = None) -> dict:
             timeout_secs=_SMALL_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> dict:
         """Retrieve a given number of unlocked requests from the beginning of the queue and lock them for a given time.
@@ -139,7 +140,7 @@ def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> dic
             timeout_secs=_MEDIUM_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def add_request(self, request: dict, *, forefront: bool | None = None) -> dict:
         """Add a request to the queue.
@@ -163,7 +164,7 @@ def add_request(self, request: dict, *, forefront: bool | None = None) -> dict:
             timeout_secs=_SMALL_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def get_request(self, request_id: str) -> dict | None:
         """Retrieve a request from the queue.
@@ -183,7 +184,7 @@ def get_request(self, request_id: str) -> dict | None:
                 params=self._params(),
                 timeout_secs=_SMALL_TIMEOUT,
             )
-            return parse_date_fields(pluck_data(response.json()))
+            return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
@@ -214,7 +215,7 @@ def update_request(self, request: dict, *, forefront: bool | None = None) -> dic
             timeout_secs=_MEDIUM_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def delete_request(self, request_id: str) -> None:
         """Delete a request from the queue.
@@ -260,7 +261,7 @@ def prolong_request_lock(
             timeout_secs=_MEDIUM_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def delete_request_lock(self, request_id: str, *, forefront: bool | None = None) -> None:
         """Delete the lock on a request.
@@ -349,7 +350,7 @@ def batch_add_requests(
                 timeout_secs=_MEDIUM_TIMEOUT,
             )
 
-            response_parsed = parse_date_fields(pluck_data(response.json()))
+            response_parsed = parse_date_fields(pluck_data(jsonlib.loads(response.text)))
             processed_requests.extend(response_parsed.get('processedRequests', []))
             unprocessed_requests.extend(response_parsed.get('unprocessedRequests', []))
 
@@ -376,7 +377,7 @@ def batch_delete_requests(self, requests: list[dict]) -> dict:
             timeout_secs=_SMALL_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def list_requests(
         self,
@@ -401,7 +402,7 @@ def list_requests(
             timeout_secs=_MEDIUM_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def unlock_requests(self: RequestQueueClient) -> dict:
         """Unlock all requests in the queue, which were locked by the same clientKey or from the same Actor run.
@@ -419,7 +420,7 @@ def unlock_requests(self: RequestQueueClient) -> dict:
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
 
 class RequestQueueClientAsync(ResourceClientAsync):
@@ -497,7 +498,7 @@ async def list_head(self, *, limit: int | None = None) -> dict:
             timeout_secs=_SMALL_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> dict:
         """Retrieve a given number of unlocked requests from the beginning of the queue and lock them for a given time.
@@ -520,7 +521,7 @@ async def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None)
             timeout_secs=_MEDIUM_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def add_request(self, request: dict, *, forefront: bool | None = None) -> dict:
         """Add a request to the queue.
@@ -544,7 +545,7 @@ async def add_request(self, request: dict, *, forefront: bool | None = None) ->
             timeout_secs=_SMALL_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def get_request(self, request_id: str) -> dict | None:
         """Retrieve a request from the queue.
@@ -564,7 +565,7 @@ async def get_request(self, request_id: str) -> dict | None:
                 params=self._params(),
                 timeout_secs=_SMALL_TIMEOUT,
             )
-            return parse_date_fields(pluck_data(response.json()))
+            return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
@@ -595,7 +596,7 @@ async def update_request(self, request: dict, *, forefront: bool | None = None)
             timeout_secs=_MEDIUM_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def delete_request(self, request_id: str) -> None:
         """Delete a request from the queue.
@@ -639,7 +640,7 @@ async def prolong_request_lock(
             timeout_secs=_MEDIUM_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def delete_request_lock(
         self,
@@ -695,7 +696,7 @@ async def _batch_add_requests_worker(
                 timeout_secs=_MEDIUM_TIMEOUT,
             )
 
-            response_parsed = parse_date_fields(pluck_data(response.json()))
+            response_parsed = parse_date_fields(pluck_data(jsonlib.loads(response.text)))
             processed_requests.extend(response_parsed.get('processedRequests', []))
             unprocessed_requests.extend(response_parsed.get('unprocessedRequests', []))
 
@@ -805,7 +806,7 @@ async def batch_delete_requests(self, requests: list[dict]) -> dict:
             json=requests,
             timeout_secs=_SMALL_TIMEOUT,
         )
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def list_requests(
         self,
@@ -830,7 +831,7 @@ async def list_requests(
             timeout_secs=_MEDIUM_TIMEOUT,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def unlock_requests(self: RequestQueueClientAsync) -> dict:
         """Unlock all requests in the queue, which were locked by the same clientKey or from the same Actor run.
@@ -848,4 +849,4 @@ async def unlock_requests(self: RequestQueueClientAsync) -> dict:
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
diff --git a/src/apify_client/clients/resource_clients/run.py b/src/apify_client/clients/resource_clients/run.py
index 2b51aaf7..90158763 100644
--- a/src/apify_client/clients/resource_clients/run.py
+++ b/src/apify_client/clients/resource_clients/run.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import json
+import json as jsonlib
 import logging
 import random
 import string
@@ -148,7 +149,7 @@ def metamorph(
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def resurrect(
         self,
@@ -195,7 +196,7 @@ def resurrect(
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def reboot(self) -> dict:
         """Reboot an Actor run. Only runs that are running, i.e. runs with status RUNNING can be rebooted.
@@ -209,7 +210,7 @@ def reboot(self) -> dict:
             url=self._url('reboot'),
             method='POST',
         )
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def dataset(self) -> DatasetClient:
         """Get the client for the default dataset of the Actor run.
@@ -470,7 +471,7 @@ async def metamorph(
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def resurrect(
         self,
@@ -517,7 +518,7 @@ async def resurrect(
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def reboot(self) -> dict:
         """Reboot an Actor run. Only runs that are running, i.e. runs with status RUNNING can be rebooted.
@@ -531,7 +532,7 @@ async def reboot(self) -> dict:
             url=self._url('reboot'),
             method='POST',
         )
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def dataset(self) -> DatasetClientAsync:
         """Get the client for the default dataset of the Actor run.
diff --git a/src/apify_client/clients/resource_clients/schedule.py b/src/apify_client/clients/resource_clients/schedule.py
index 315c5b83..360e819e 100644
--- a/src/apify_client/clients/resource_clients/schedule.py
+++ b/src/apify_client/clients/resource_clients/schedule.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import json as jsonlib
 from typing import Any
 
 from apify_shared.utils import filter_out_none_values_recursively, ignore_docs
@@ -115,7 +116,7 @@ def get_log(self) -> list | None:
                 method='GET',
                 params=self._params(),
             )
-            return pluck_data_as_list(response.json())
+            return pluck_data_as_list(jsonlib.loads(response.text))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
@@ -205,7 +206,7 @@ async def get_log(self) -> list | None:
                 method='GET',
                 params=self._params(),
             )
-            return pluck_data_as_list(response.json())
+            return pluck_data_as_list(jsonlib.loads(response.text))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
diff --git a/src/apify_client/clients/resource_clients/task.py b/src/apify_client/clients/resource_clients/task.py
index e791f46b..7276650e 100644
--- a/src/apify_client/clients/resource_clients/task.py
+++ b/src/apify_client/clients/resource_clients/task.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import json as jsonlib
 from typing import TYPE_CHECKING, Any, cast
 
 from apify_shared.utils import (
@@ -201,7 +202,7 @@ def start(
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     def call(
         self,
@@ -264,7 +265,7 @@ def get_input(self) -> dict | None:
                 method='GET',
                 params=self._params(),
             )
-            return cast('dict', response.json())
+            return cast('dict', jsonlib.loads(response.text))
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
         return None
@@ -283,7 +284,7 @@ def update_input(self, *, task_input: dict) -> dict:
             params=self._params(),
             json=task_input,
         )
-        return cast('dict', response.json())
+        return cast('dict', jsonlib.loads(response.text))
 
     def runs(self) -> RunCollectionClient:
         """Retrieve a client for the runs of this task."""
@@ -458,7 +459,7 @@ async def start(
             params=request_params,
         )
 
-        return parse_date_fields(pluck_data(response.json()))
+        return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
     async def call(
         self,
@@ -521,7 +522,7 @@ async def get_input(self) -> dict | None:
                 method='GET',
                 params=self._params(),
             )
-            return cast('dict', response.json())
+            return cast('dict', jsonlib.loads(response.text))
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
         return None
@@ -540,7 +541,7 @@ async def update_input(self, *, task_input: dict) -> dict:
             params=self._params(),
             json=task_input,
         )
-        return cast('dict', response.json())
+        return cast('dict', jsonlib.loads(response.text))
 
     def runs(self) -> RunCollectionClientAsync:
         """Retrieve a client for the runs of this task."""
diff --git a/src/apify_client/clients/resource_clients/user.py b/src/apify_client/clients/resource_clients/user.py
index 71ab9998..87d7aeb2 100644
--- a/src/apify_client/clients/resource_clients/user.py
+++ b/src/apify_client/clients/resource_clients/user.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import json as jsonlib
 from typing import Any
 
 from apify_shared.utils import filter_out_none_values_recursively, ignore_docs, parse_date_fields
@@ -50,7 +51,7 @@ def monthly_usage(self) -> dict | None:
                 method='GET',
                 params=self._params(),
             )
-            return parse_date_fields(pluck_data(response.json()))
+            return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
@@ -74,7 +75,7 @@ def limits(self) -> dict | None:
                 method='GET',
                 params=self._params(),
             )
-            return parse_date_fields(pluck_data(response.json()))
+            return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
@@ -142,7 +143,7 @@ async def monthly_usage(self) -> dict | None:
                 method='GET',
                 params=self._params(),
             )
-            return parse_date_fields(pluck_data(response.json()))
+            return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
@@ -166,7 +167,7 @@ async def limits(self) -> dict | None:
                 method='GET',
                 params=self._params(),
             )
-            return parse_date_fields(pluck_data(response.json()))
+            return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
diff --git a/src/apify_client/clients/resource_clients/webhook.py b/src/apify_client/clients/resource_clients/webhook.py
index 87592a15..a74d0216 100644
--- a/src/apify_client/clients/resource_clients/webhook.py
+++ b/src/apify_client/clients/resource_clients/webhook.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import json as jsonlib
 from typing import TYPE_CHECKING, Any
 
 from apify_shared.utils import (
@@ -151,7 +152,7 @@ def test(self) -> dict | None:
                 params=self._params(),
             )
 
-            return parse_date_fields(pluck_data(response.json()))
+            return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
@@ -262,7 +263,7 @@ async def test(self) -> dict | None:
                 params=self._params(),
             )
 
-            return parse_date_fields(pluck_data(response.json()))
+            return parse_date_fields(pluck_data(jsonlib.loads(response.text)))
 
         except ApifyApiError as exc:
             catch_not_found_or_throw(exc)
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 5114a4b1..5768e87f 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -21,7 +21,7 @@ def apify_client() -> ApifyClient:
 
 # This fixture can't be session-scoped,
 # because then you start getting `RuntimeError: Event loop is closed` errors,
-# because `httpx.AsyncClient` in `ApifyClientAsync` tries to reuse the same event loop across requests,
+# because `impit.AsyncClient` in `ApifyClientAsync` tries to reuse the same event loop across requests,
 # but `pytest-asyncio` closes the event loop after each test,
 # and uses a new one for the next test.
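 # (In other words, a session-scoped client would bind its `impit.AsyncClient` to the
 # first test's event loop; once pytest-asyncio closes that loop, any later await on
 # the shared client raises `RuntimeError: Event loop is closed`.)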
 @pytest.fixture
diff --git a/tests/unit/test_client_timeouts.py b/tests/unit/test_client_timeouts.py
index 3259c977..70d09b90 100644
--- a/tests/unit/test_client_timeouts.py
+++ b/tests/unit/test_client_timeouts.py
@@ -1,13 +1,10 @@
 from __future__ import annotations
 
-import time
-from functools import partial
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
 from unittest.mock import Mock
 
 import pytest
-import respx
-from werkzeug import Response as WerkzeugResponse
+from impit import Response, TimeoutException
 
 from apify_client import ApifyClient
 from apify_client._http_client import HTTPClient, HTTPClientAsync
@@ -17,16 +14,33 @@
 from apify_client.clients.resource_clients import key_value_store as kvs
 
 if TYPE_CHECKING:
-    from httpx import Request, Response
+    from collections.abc import Iterator
+
     from pytest_httpserver import HTTPServer
-    from werkzeug import Request as WerkzeugRequest
 
 
 class EndOfTestError(Exception):
     """Custom exception that is raised after the relevant part of the code is executed to stop the test."""
 
 
-async def test_dynamic_timeout_async_client(httpserver: HTTPServer) -> None:
+@pytest.fixture
+def patch_request(monkeypatch: pytest.MonkeyPatch) -> Iterator[list]:
+    timeouts = []
+
+    def mock_request(*_args: Any, **kwargs: Any) -> None:
+        timeouts.append(kwargs.get('timeout'))
+        raise EndOfTestError
+
+    async def mock_request_async(*args: Any, **kwargs: Any) -> None:
+        return mock_request(*args, **kwargs)
+
+    monkeypatch.setattr('impit.Client.request', mock_request)
+    monkeypatch.setattr('impit.AsyncClient.request', mock_request_async)
+    yield timeouts
+    monkeypatch.undo()
+
+
+async def test_dynamic_timeout_async_client(monkeypatch: pytest.MonkeyPatch) -> None:
     """Tests timeout values for request with retriable errors.
 
     Values should increase with each attempt, starting from initial call value and bounded by the client timeout.
@@ -34,36 +48,35 @@ async def test_dynamic_timeout_async_client(monkeypatch: pytest.MonkeyPatch) ->
     should_raise_error = iter((True, True, True, False))
     call_timeout = 1
     client_timeout = 5
-    expected_timeouts = iter((call_timeout, 2, 4, client_timeout))
+    expected_timeouts = [call_timeout, 2, 4, client_timeout]
     retry_counter_mock = Mock()
 
-    def slow_handler(_request: WerkzeugRequest) -> WerkzeugResponse:
-        timeout = next(expected_timeouts)
-        should_raise = next(should_raise_error)
-        # Counter for retries
-        retry_counter_mock()
+    timeouts = []
 
+    async def mock_request(*_args: Any, **kwargs: Any) -> Response:
+        timeouts.append(kwargs.get('timeout'))
+        retry_counter_mock()
+        should_raise = next(should_raise_error)
         if should_raise:
-            # We expect longer than the client is willing to wait. This will cause a timeout on the client side.
-            time.sleep(timeout + 0.02)
+            raise TimeoutException
 
-        return WerkzeugResponse('200 OK')
+        return Response(status_code=200)
 
-    httpserver.expect_request('/async_timeout', method='GET').respond_with_handler(slow_handler)
+    monkeypatch.setattr('impit.AsyncClient.request', mock_request)
 
-    server_url = str(httpserver.url_for('/async_timeout'))
     response = await HTTPClientAsync(timeout_secs=client_timeout).call(
-        method='GET', url=server_url, timeout_secs=call_timeout
+        method='GET', url='http://placeholder.url/async_timeout', timeout_secs=call_timeout
     )
 
     # Check that the retry counter was called the expected number of times
     # (4 times: 3 retries + 1 final successful call)
     assert retry_counter_mock.call_count == 4
+    assert timeouts == expected_timeouts
 
     # Check that the response is successful
     assert response.status_code == 200
 
 
-def test_dynamic_timeout_sync_client(httpserver: HTTPServer) -> None:
+def test_dynamic_timeout_sync_client(monkeypatch: pytest.MonkeyPatch) -> None:
     """Tests timeout values for request with retriable errors.
 
     Values should increase with each attempt, starting from initial call value and bounded by the client timeout.
@@ -71,48 +84,34 @@ def test_dynamic_timeout_sync_client(monkeypatch: pytest.MonkeyPatch) -> None:
     should_raise_error = iter((True, True, True, False))
     call_timeout = 1
     client_timeout = 5
-    expected_timeouts = iter((call_timeout, 2, 4, client_timeout))
+    expected_timeouts = [call_timeout, 2, 4, client_timeout]
     retry_counter_mock = Mock()
 
-    def slow_handler(_request: WerkzeugRequest) -> WerkzeugResponse:
-        timeout = next(expected_timeouts)
-        should_raise = next(should_raise_error)
-        # Counter for retries
-        retry_counter_mock()
+    timeouts = []
 
+    def mock_request(*_args: Any, **kwargs: Any) -> Response:
+        timeouts.append(kwargs.get('timeout'))
+        retry_counter_mock()
+        should_raise = next(should_raise_error)
         if should_raise:
-            # We expect longer than the client is willing to wait. This will cause a timeout on the client side.
-            time.sleep(timeout + 0.02)
+            raise TimeoutException
 
-        return WerkzeugResponse('200 OK')
+        return Response(status_code=200)
 
-    httpserver.expect_request('/sync_timeout', method='GET').respond_with_handler(slow_handler)
+    monkeypatch.setattr('impit.Client.request', mock_request)
 
-    server_url = str(httpserver.url_for('/sync_timeout'))
-
-    response = HTTPClient(timeout_secs=client_timeout).call(method='GET', url=server_url, timeout_secs=call_timeout)
+    response = HTTPClient(timeout_secs=client_timeout).call(
+        method='GET', url='http://placeholder.url/sync_timeout', timeout_secs=call_timeout
+    )
 
     # Check that the retry counter was called the expected number of times
     # (4 times: 3 retries + 1 final successful call)
     assert retry_counter_mock.call_count == 4
+    assert timeouts == expected_timeouts
 
     # Check that the response is successful
     assert response.status_code == 200
 
 
-def assert_timeout(expected_timeout: int, request: Request) -> Response:
-    """Assert that correct timeouts are set on the request and raise `EndOfTestError`.
-
-    This is intended for tests that are only testing timeout value and further execution of the code is not desired.
- """ - assert request.extensions['timeout'] == { - 'connect': expected_timeout, - 'pool': expected_timeout, - 'read': expected_timeout, - 'write': expected_timeout, - } - raise EndOfTestError - - _timeout_params = [ (DatasetClient, 'get', dataset._SMALL_TIMEOUT, {}), (DatasetClient, 'update', dataset._SMALL_TIMEOUT, {}), @@ -153,18 +152,22 @@ def assert_timeout(expected_timeout: int, request: Request) -> Response: ('client_type', 'method', 'expected_timeout', 'kwargs'), _timeout_params, ) -@respx.mock def test_specific_timeouts_for_specific_endpoints_sync( client_type: type[DatasetClient | KeyValueStoreClient | RequestQueueClient], method: str, kwargs: dict, expected_timeout: int, + patch_request: list[float | None], + httpserver: HTTPServer, ) -> None: - respx.route(host='example.com').mock(side_effect=partial(assert_timeout, expected_timeout)) - client = client_type(base_url='https://example.com', root_client=ApifyClient(), http_client=HTTPClient()) + httpserver.expect_request('/').respond_with_data(status=200) + client = client_type(base_url=httpserver.url_for('/'), root_client=ApifyClient(), http_client=HTTPClient()) with pytest.raises(EndOfTestError): getattr(client, method)(**kwargs) + assert len(patch_request) == 1 + assert patch_request[0] == expected_timeout + # This test will probably need to be reworked or skipped when switching to `impit`. # Without the mock library, it's difficult to reproduce, maybe with monkeypatch? @@ -172,14 +175,18 @@ def test_specific_timeouts_for_specific_endpoints_sync( ('client_type', 'method', 'expected_timeout', 'kwargs'), _timeout_params, ) -@respx.mock async def test_specific_timeouts_for_specific_endpoints_async( client_type: type[DatasetClient | KeyValueStoreClient | RequestQueueClient], method: str, kwargs: dict, expected_timeout: int, + patch_request: list[float | None], + httpserver: HTTPServer, ) -> None: - respx.route(host='example.com').mock(side_effect=partial(assert_timeout, expected_timeout)) - client = client_type(base_url='https://example.com', root_client=ApifyClient(), http_client=HTTPClient()) + httpserver.expect_request('/').respond_with_data(status=200) + client = client_type(base_url=httpserver.url_for('/'), root_client=ApifyClient(), http_client=HTTPClient()) with pytest.raises(EndOfTestError): await getattr(client, method)(**kwargs) + + assert len(patch_request) == 1 + assert patch_request[0] == expected_timeout diff --git a/uv.lock b/uv.lock index 4e297b08..3e15ac6e 100644 --- a/uv.lock +++ b/uv.lock @@ -6,29 +6,14 @@ resolution-markers = [ "python_full_version < '3.10'", ] -[[package]] -name = "anyio" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "idna" }, - { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, -] - [[package]] name = "apify-client" -version = "1.12.2" +version = 
"1.12.3" source = { editable = "." } dependencies = [ { name = "apify-shared" }, { name = "colorama" }, - { name = "httpx" }, + { name = "impit" }, { name = "more-itertools" }, ] @@ -46,7 +31,6 @@ dev = [ { name = "pytest-timeout" }, { name = "pytest-xdist" }, { name = "redbaron" }, - { name = "respx" }, { name = "ruff" }, { name = "setuptools" }, { name = "types-colorama" }, @@ -57,7 +41,7 @@ dev = [ requires-dist = [ { name = "apify-shared", specifier = "<2.0.0" }, { name = "colorama", specifier = ">=0.4.0" }, - { name = "httpx", specifier = ">=0.25" }, + { name = "impit", specifier = ">=0.5.2" }, { name = "more-itertools", specifier = ">=10.0.0" }, ] @@ -75,7 +59,6 @@ dev = [ { name = "pytest-timeout", specifier = ">=2.4.0" }, { name = "pytest-xdist", specifier = "~=3.8.0" }, { name = "redbaron", specifier = "~=0.9.0" }, - { name = "respx", specifier = "~=0.22.0" }, { name = "ruff", specifier = "~=0.12.0" }, { name = "setuptools" }, { name = "types-colorama", specifier = "~=0.4.15.20240106" }, @@ -516,43 +499,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a7/55/588425bdbe8097b621db813e9b33f0a8a7257771683e0f5369c6c8eb66ab/griffe-1.11.0-py3-none-any.whl", hash = "sha256:dc56cc6af8d322807ecdb484b39838c7a51ca750cf21ccccf890500c4d6389d8", size = 137576, upload-time = "2025-08-07T18:23:34.859Z" }, ] -[[package]] -name = "h11" -version = "0.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, 
upload-time = "2024-12-06T15:37:21.509Z" }, -] - [[package]] name = "identify" version = "2.6.12" @@ -571,6 +517,55 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "impit" +version = "0.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/b9/713caad9ebb3fe3d016b6715a8f2fa8dad070159907b36bb743ea6a13919/impit-0.5.2.tar.gz", hash = "sha256:027641ca72c807372101ae4c3533fc2c3c837413ead4b7461a044ec5b52f3b8e", size = 89508, upload-time = "2025-08-11T15:51:00.009Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/54/48f04b0ceeeaa8ed569c61e032448fad2960cd6c942ab25d86ffcf1d1661/impit-0.5.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:06a0fc3a2d87ea2b4e43c7c109aebe94ff77135685874e9371df07504306367d", size = 3842507, upload-time = "2025-08-11T15:49:53.199Z" }, + { url = "https://files.pythonhosted.org/packages/f3/00/22030ddbc0af4fc5c09049567922288dac7b72d07853b0a8637e090fc97a/impit-0.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f970b8f0d5f6ab06bbc7b2cb19f027d8126e9123d9fd566de552e6ad7e015738", size = 3670030, upload-time = "2025-08-11T15:49:55.175Z" }, + { url = "https://files.pythonhosted.org/packages/af/28/6712b06553576c2a014229be5fc89541afbd28769c525ae82b9212b4aadd/impit-0.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e45512070d2d8eb4262a2ff29475326b507df7b10a74dceb3022045ecf851e6b", size = 6075245, upload-time = "2025-08-11T15:49:56.917Z" }, + { url = "https://files.pythonhosted.org/packages/01/c0/8efb430c964b6fd0d51b695ac99e76c421d740340257be5f24673ab2705c/impit-0.5.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:14a49fbfec6f10449758adfefb142e5df4a8f8acb0368a4f0a4e7e41e0d0ce3d", size = 6375009, upload-time = "2025-08-11T15:49:58.699Z" }, + { url = "https://files.pythonhosted.org/packages/0c/10/b3b66bd0e1867a1b9d2e6b0834154bf787d8750acca445e0f98dbce95f96/impit-0.5.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15b2454fe01980cc078d07c47451eb12b4965b8c06cf37d0f7b0d14d1a17f3b2", size = 6223166, upload-time = "2025-08-11T15:50:00.617Z" }, + { url = "https://files.pythonhosted.org/packages/70/f0/0011b88aca3047e1bb878dd1be417939546eeb69e348d82249bf77b58e1c/impit-0.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:a315e931fc9dc4063694c36a31eb54d0bdedb984d5f7ee3af2f1e9ce9597417d", size = 3878639, upload-time = "2025-08-11T15:50:02.218Z" }, + { url = "https://files.pythonhosted.org/packages/3c/73/49efd83800151e3e4fda9ee4c6195d645cc0f9d4f154b0fb8c469fc53097/impit-0.5.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:1d94c20d167942d0cc2728ffd69a87e0dfbfe7c8610f924f7628b3c8d2e1a63a", size = 3842528, upload-time = "2025-08-11T15:50:04.312Z" }, + { url = "https://files.pythonhosted.org/packages/e1/c9/2a810dbb05c6a706ec57e69ddce178cf04f6a047f1ca628a412285cd99fa/impit-0.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:91f0ab2b18134cd9f73d0d7ed0954144e3fce01048267d76fa36e98361e7c01e", size = 3669966, upload-time = "2025-08-11T15:50:05.72Z" }, + { url = "https://files.pythonhosted.org/packages/b6/05/0a954a0ed83445a6e0c667cc20939f879c4ff15a4e45fd04099636ce75d6/impit-0.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8a0d9bb6038da12dbb4cb98fa00d0ec460ac022e5c91b0a4947587919e57997b", size = 6075012, upload-time = "2025-08-11T15:50:07.203Z" }, + { url = "https://files.pythonhosted.org/packages/d7/b3/b5189879471db1077d105a4ef33c9eb2c8a578ca1dfd526b1589c8d86a2d/impit-0.5.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a3e68cfea0ae3ba47623924f0a94ad2ce9392310e38ebb8468de8bea0dfea2cb", size = 6374946, upload-time = "2025-08-11T15:50:08.695Z" }, + { url = "https://files.pythonhosted.org/packages/4f/fb/6f165cf5b4dd87354b64c9be58a7c1da006f7c2df2b048818f0ab8a9f233/impit-0.5.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:98186d654216cc6bbe615fa75f36359d4e865ebff919e6544f52f63d8c0f9a76", size = 6223234, upload-time = "2025-08-11T15:50:10.478Z" }, + { url = "https://files.pythonhosted.org/packages/91/cb/ab9db5d701c27bb567c90aca98e3feafe9fc30a1487498abbf4ed5781b1a/impit-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:576885567b26dba214369b0eb96c8a605833d019d9e0d9a703ef2f97ac99b7d8", size = 3878579, upload-time = "2025-08-11T15:50:11.898Z" }, + { url = "https://files.pythonhosted.org/packages/8a/10/1eaf80842668992ed0c09d426d8429ec4f5bee28351cc200c2db44cc140e/impit-0.5.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5c5f3c20c9bedbdda8f61ee30d0b1eee291882045028c3c784dcb13849bf92f8", size = 3842234, upload-time = "2025-08-11T15:50:13.396Z" }, + { url = "https://files.pythonhosted.org/packages/5b/84/5a862c24df552e8774c1f8a54391e9d46f7ed23cad445c01e9ea2bcb39d3/impit-0.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:22c6a7d313b98bcefe0d341912ea5cc9dc78c6f4a7ff5ebf0eef8abca0a74b8f", size = 3668917, upload-time = "2025-08-11T15:50:15.352Z" }, + { url = "https://files.pythonhosted.org/packages/36/96/f56f277c53434517a1e6542a55131fd97ed663f5abe6bd89fd750abf0f7a/impit-0.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:688302db9bca919c0facb865cbbcbdba6a40ee69785250c6317090d5d6a6bf48", size = 6074079, upload-time = "2025-08-11T15:50:16.822Z" }, + { url = "https://files.pythonhosted.org/packages/35/4d/ee9afc72406b11fe743132ccdc3d994dc23c3eacb92e37d6a73990aea2fe/impit-0.5.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f81654a9ba181cc8ff974d79063446a1072549c37a12f1184a215c18e73def21", size = 6373260, upload-time = "2025-08-11T15:50:18.366Z" }, + { url = "https://files.pythonhosted.org/packages/0d/73/80e39340b940296f04586c2b14992fb7f43b55be630e3a5e78f106473fbe/impit-0.5.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd10a83f7e848fe8e7ae965615397641334eb036d5c3bbc1cdcef709dd42a9d3", size = 6222186, upload-time = "2025-08-11T15:50:19.91Z" }, + { url = "https://files.pythonhosted.org/packages/02/43/13e5f382ab37cdd0ff0f9335f6f3eddc67fecc36449a32a5ce6f1a18cded/impit-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:d273022353c640af064f51e25192fe4a71b84ffc451e57c92c632c4bb75af2c9", size = 3877409, upload-time = "2025-08-11T15:50:21.345Z" }, + { url = "https://files.pythonhosted.org/packages/29/7b/edf5661c19e3fbd257b525d22ae041ffb9704452591ab670962b834a73c4/impit-0.5.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:160c43a7affa1f530dd9a18df0584c5a38c3081d6574ce544b06a1f4736b20b7", size = 3841954, upload-time = "2025-08-11T15:50:23.173Z" }, + { url = "https://files.pythonhosted.org/packages/08/f3/f718e3c6e7e8f0ad256588a00114e6ed9134160fd4ae32556db2c3a126e6/impit-0.5.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:01a0c99566e1d198b83d007545c36cb578786266bda7b203841d9cd910f91516", size = 3668427, upload-time = "2025-08-11T15:50:24.885Z" }, + { 
url = "https://files.pythonhosted.org/packages/a2/cf/d23eaf20f300c6298769b588ca660cc590ffaa21dd13a0c2700dcd96a226/impit-0.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cf8e8d5322980face7e5824fcc4c000fd842d90c3bc58d2fd2fec3e08a8981", size = 6074706, upload-time = "2025-08-11T15:50:26.496Z" }, + { url = "https://files.pythonhosted.org/packages/c2/d2/9d48b6f0e84af3875579c1a2ebbd678eefe29e4b2d8e2679a1a2f7693d6f/impit-0.5.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:109f7b6e45936b1e9abf8df8909753e94156cb475aa8f255a27297b6672ea1c7", size = 6373178, upload-time = "2025-08-11T15:50:28.196Z" }, + { url = "https://files.pythonhosted.org/packages/2b/53/6c839dd06427d0515ab343f011b053342ced17e6431d1095c8e0a271cb54/impit-0.5.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7bfc390c8e4b08e89be07a9027c020d9b0df97adc4fdc03224e69351e8be592c", size = 6222122, upload-time = "2025-08-11T15:50:29.761Z" }, + { url = "https://files.pythonhosted.org/packages/1d/01/e97e8dd4365e6dab52905e511d1fc4ce83a106d4f50419c2e33c40644906/impit-0.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:27bdb876336ed1de1ba6794da791761911f2f4c10242dc6ac639371080a5027a", size = 3877549, upload-time = "2025-08-11T15:50:31.362Z" }, + { url = "https://files.pythonhosted.org/packages/9d/16/40147ec10b0c37a05ee97aef5fce028fb22ef040c21a5912030687dfb0b7/impit-0.5.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ef34e005f128a83bf1e3e9da53551d508ac50baecaa2f32f2a77750581e2270f", size = 6373218, upload-time = "2025-08-11T15:50:32.87Z" }, + { url = "https://files.pythonhosted.org/packages/7c/eb/d16e74d2412d8660f6bfc6c01fbb86499c6af750e36ab30d0a0250a27aa2/impit-0.5.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:940a6a0fe277867da9efdf6c266d64ba8c625da447b2cdc6d30fe408c2c6b6ca", size = 6223153, upload-time = "2025-08-11T15:50:34.456Z" }, + { url = "https://files.pythonhosted.org/packages/a1/d6/14a531d4b433a095f252f3b89dc1c37932fa5480453afc5ed2381c055aeb/impit-0.5.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fdd250277c8bbfec4b84c88d5b129438d43e1a9642c0aaaf7885884bd1bc22", size = 6074030, upload-time = "2025-08-11T15:50:35.922Z" }, + { url = "https://files.pythonhosted.org/packages/65/be/fcae13746a8d9a1854ba446d0910faa24d0c0a32052d6da41cd794bcd310/impit-0.5.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:6e3a0982533f63c142e3852b52024ecc10d2af62d6f2314baf80cee18253870b", size = 3842910, upload-time = "2025-08-11T15:50:37.463Z" }, + { url = "https://files.pythonhosted.org/packages/eb/27/a2c34835f64154fc91b9aba87ecc9f642e38a44bf23d0b414271371f7db4/impit-0.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c36c9311ef74944e0ebf9cf2bb0cf660c3ea8521d2766fae2bc8ccfefeb423c2", size = 3670390, upload-time = "2025-08-11T15:50:38.938Z" }, + { url = "https://files.pythonhosted.org/packages/be/a8/cfc4d2bede1d4ae930a8b8ac3b86b7dc2fbe612fb41893c236bdcbc2c79c/impit-0.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68042849ed5178df7d2254371e70c84ab86974dbdd379e201a30897ee8e0a67c", size = 6075478, upload-time = "2025-08-11T15:50:40.61Z" }, + { url = "https://files.pythonhosted.org/packages/fb/6f/a4cf7b04b30ba8187985adac252436b74668503459e87ce4c2559cba8c42/impit-0.5.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5aeb9b58856d0587485c3f670d146d67e5219a48ec6842a7c2bc32a9578f3bf3", size = 6375760, upload-time = "2025-08-11T15:50:42.457Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/ec/24ce94d93283ddc0cf5703f23d41a27e9779a6700f6385e0b3ea22c89e1a/impit-0.5.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d55aa40b8a8795358a2b51bb2e27aa2224de4a413816df431e6abdf9029955b", size = 6223400, upload-time = "2025-08-11T15:50:44.302Z" }, + { url = "https://files.pythonhosted.org/packages/04/10/0e29f978d97d9d56cfb652b4e9c832be068c5fbca3fc62e735c1a9ca7cbd/impit-0.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:e8011d3484d87f54c7919f971820edac8f10d368f6db49330aa64f1a7586019b", size = 3878261, upload-time = "2025-08-11T15:50:46.012Z" }, + { url = "https://files.pythonhosted.org/packages/74/f9/db47144fc13a1ed12bdf3e0897f05f80fe6c031ad7c18998f390e3a83e68/impit-0.5.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bfc41c77fccc4e6be2ed7647553d32c35aaa879b528550aee48222995fff42", size = 6075110, upload-time = "2025-08-11T15:50:47.425Z" }, + { url = "https://files.pythonhosted.org/packages/74/dc/a1c26e977e2b27f039744da58e5e8a08a21f183b8687a5dff02426fa6663/impit-0.5.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c831ff21e08a52d720798208eeb32cf8bdec3a933ae2c9fcbe98c7bf27a18d1a", size = 6375638, upload-time = "2025-08-11T15:50:48.966Z" }, + { url = "https://files.pythonhosted.org/packages/5b/8e/7fdc9ea49e74a1814ce0bc9e0d48d3968ce870abafbb14928de75f3526aa/impit-0.5.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:b961185932c83753d95432dd6885934984f3bdfa0991b18e3554205e42f955d6", size = 6223026, upload-time = "2025-08-11T15:50:50.749Z" }, + { url = "https://files.pythonhosted.org/packages/f8/02/2fdcfb828751939a39b9093449b53f04ab769c5634cca21524e3e15f89f3/impit-0.5.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b8c2f3c8dfa1c0e3defab72ba72fe10bdd31bb026f52a9a041f094b2f642f8d", size = 6075198, upload-time = "2025-08-11T15:50:52.662Z" }, + { url = "https://files.pythonhosted.org/packages/0d/41/0fdb5d609218d86daec041f77854b10ae78a14100466f612bda82c142bf8/impit-0.5.2-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:26ddc9925be14d18afe025f04bb88bb40c9e9d240436cfe640533149634ea143", size = 6375549, upload-time = "2025-08-11T15:50:54.078Z" }, + { url = "https://files.pythonhosted.org/packages/ef/cb/85adf6ede6f2eb5814c21972ca73747e2437be91f5d7f2bed5e0fcd66471/impit-0.5.2-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:b55b5ba8f17d0981d2a7d9209ef6d97cf7b8deb2f7e982c0b5852763974d34c6", size = 6223078, upload-time = "2025-08-11T15:50:55.624Z" }, + { url = "https://files.pythonhosted.org/packages/36/e2/75a868f1cb15c1fa67c1864a10aa714cda6dfe7cb16454d0a21b3ec4fed6/impit-0.5.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2f9897a93666b16c7ae663a76cf5d94c27b23a90a69e6028ba059d27186f0a04", size = 6375441, upload-time = "2025-08-11T15:50:57.143Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ad/2760aca73d6c86e853a65031a1ded926ec8801a95681ee15b6c9b11757e2/impit-0.5.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0c2cb45d500c6fdace0c38a951f9ac19575371c11ceb593d46ba191ed63e9288", size = 6223254, upload-time = "2025-08-11T15:50:58.611Z" }, +] + [[package]] name = "iniconfig" version = "2.1.0" @@ -1013,18 +1008,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = 
"2025-06-09T16:43:05.728Z" }, ] -[[package]] -name = "respx" -version = "0.22.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "httpx" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" }, -] - [[package]] name = "rply" version = "0.7.8" @@ -1071,15 +1054,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, ] -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, -] - [[package]] name = "tomli" version = "2.2.1" diff --git a/website/sidebars.js b/website/sidebars.js index e45f2828..448eac9c 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -33,17 +33,17 @@ module.exports = { }, ], }, - // { - // type: 'category', - // label: 'Upgrading', - // collapsed: false, - // items: [ - // { - // type: 'autogenerated', - // dirName: '04_upgrading', - // }, - // ], - // }, + { + type: 'category', + label: 'Upgrading', + collapsed: false, + items: [ + { + type: 'autogenerated', + dirName: '04_upgrading', + }, + ], + }, { type: 'doc', id: 'changelog',