diff --git a/.github/workflows/run_code_checks.yaml b/.github/workflows/run_code_checks.yaml index 2fe95637..4323b479 100644 --- a/.github/workflows/run_code_checks.yaml +++ b/.github/workflows/run_code_checks.yaml @@ -36,7 +36,6 @@ jobs: integration_tests: name: Integration tests - needs: [lint_check, type_check, unit_tests] uses: apify/workflows/.github/workflows/python_integration_tests.yaml@main secrets: inherit with: diff --git a/Makefile b/Makefile index 707ebec7..73f69455 100644 --- a/Makefile +++ b/Makefile @@ -26,13 +26,13 @@ type-check: uv run mypy unit-tests: - uv run pytest --numprocesses=auto --verbose --cov=src/apify tests/unit + uv run pytest --numprocesses=auto -vv --cov=src/apify tests/unit unit-tests-cov: - uv run pytest --numprocesses=auto --verbose --cov=src/apify --cov-report=html tests/unit + uv run pytest --numprocesses=auto -vv --cov=src/apify --cov-report=html tests/unit integration-tests: - uv run pytest --numprocesses=$(INTEGRATION_TESTS_CONCURRENCY) --verbose tests/integration + uv run pytest --numprocesses=$(INTEGRATION_TESTS_CONCURRENCY) -vv tests/integration format: uv run ruff check --fix diff --git a/docs/03_concepts/code/03_dataset_exports.py b/docs/03_concepts/code/03_dataset_exports.py index 78f0f5b9..4f0c01c4 100644 --- a/docs/03_concepts/code/03_dataset_exports.py +++ b/docs/03_concepts/code/03_dataset_exports.py @@ -11,14 +11,14 @@ async def main() -> None: await dataset.export_to( content_type='csv', key='data.csv', - to_key_value_store_name='my-cool-key-value-store', + to_kvs_name='my-cool-key-value-store', ) # Export the data as JSON await dataset.export_to( content_type='json', key='data.json', - to_key_value_store_name='my-cool-key-value-store', + to_kvs_name='my-cool-key-value-store', ) # Print the exported records diff --git a/docs/03_concepts/code/conditional_actor_charge.py b/docs/03_concepts/code/conditional_actor_charge.py index 926c591d..f4695cc4 100644 --- a/docs/03_concepts/code/conditional_actor_charge.py +++ b/docs/03_concepts/code/conditional_actor_charge.py @@ -6,8 +6,8 @@ async def main() -> None: # Check the dataset because there might already be items # if the run migrated or was restarted default_dataset = await Actor.open_dataset() - dataset_info = await default_dataset.get_info() - charged_items = dataset_info.item_count if dataset_info else 0 + metadata = await default_dataset.get_metadata() + charged_items = metadata.item_count # highlight-start if Actor.get_charging_manager().get_pricing_info().is_pay_per_event: diff --git a/docs/04_upgrading/upgrading_to_v2.md b/docs/04_upgrading/upgrading_to_v2.md index 90062305..1fd1d111 100644 --- a/docs/04_upgrading/upgrading_to_v2.md +++ b/docs/04_upgrading/upgrading_to_v2.md @@ -3,7 +3,7 @@ id: upgrading-to-v2 title: Upgrading to v2 --- -This page summarizes most of the breaking changes between Apify Python SDK v1.x and v2.0. +This page summarizes the breaking changes between Apify Python SDK v1.x and v2.0. ## Python version support @@ -12,7 +12,7 @@ Support for Python 3.8 has been dropped. The Apify Python SDK v2.x now requires ## Storages - The SDK now uses [crawlee](https://github.com/apify/crawlee-python) for local storage emulation. This change should not affect intended usage (working with `Dataset`, `KeyValueStore` and `RequestQueue` classes from the `apify.storages` module or using the shortcuts exposed by the `Actor` class) in any way. -- There is a difference in the `RequestQueue.add_request` method: it accepts an `apify.Request` object instead of a free-form dictionary. 
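The rewritten bullet below lists three migration paths for `add_request`. A minimal sketch of each, assuming an Actor context and the default request queue; the URL and the `unique_key`/`id` values are illustrative:

```python
import asyncio

from apify import Actor, Request


async def main() -> None:
    async with Actor:
        queue = await Actor.open_request_queue()

        # Quick migration: validate an existing v1-style dict. Note that
        # `model_validate` prefills nothing, so required fields such as
        # `unique_key` (and `id`) must already be present in the dict.
        request = Request.model_validate({
            'url': 'https://example.tld',
            'unique_key': 'https://example.tld',
            'id': 'rZdpaXMRUkVcLNA',  # hypothetical request ID
        })
        await queue.add_request(request)

        # Preferred: `Request.from_url` prefills `unique_key` and `id`.
        await queue.add_request(Request.from_url('https://example.tld'))

        # Simple cases: a plain URL string is also accepted.
        await queue.add_request('https://example.tld')


asyncio.run(main())
```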
+- There is a difference in the `RequestQueue.add_request` method: it accepts an `apify.Request` object instead of a free-form dictionary.
    - A quick way to migrate from dict-based arguments is to wrap it with a `Request.model_validate()` call.
    - The preferred way is using the `Request.from_url` helper which prefills the `unique_key` and `id` attributes, or instantiating it directly, e.g., `Request(url='https://example.tld', ...)`.
    - For simple use cases, `add_request` also accepts plain strings that contain an URL, e.g. `queue.add_request('https://example.tld')`.
diff --git a/docs/04_upgrading/upgrading_to_v3.md b/docs/04_upgrading/upgrading_to_v3.md
new file mode 100644
index 00000000..eba1f2d4
--- /dev/null
+++ b/docs/04_upgrading/upgrading_to_v3.md
@@ -0,0 +1,21 @@
+---
+id: upgrading-to-v3
+title: Upgrading to v3
+---
+
+This page summarizes the breaking changes between Apify Python SDK v2.x and v3.0.
+
+## Python version support
+
+Support for Python 3.9 has been dropped. The Apify Python SDK v3.x now requires Python 3.10 or later. Make sure your environment is running a compatible version before upgrading.
+
+## Storages
+
+- `Dataset.get_info()` has been replaced by `Dataset.get_metadata()`, which returns the metadata object directly.
+- The `to_key_value_store_name` argument of `Dataset.export_to` has been renamed to `to_kvs_name`.
+- `RequestList` has been renamed to `ApifyRequestList` and moved from `apify.storages` to the new `apify.request_loaders` module.
+
+## Storage clients
+
+- The `apify.apify_storage_client` module has been replaced by the new `apify.storage_clients` module.
+- `ApifyStorageClient` is now instantiated without arguments (`ApifyStorageClient()`); pass the `configuration` to the storage's `open` method instead of calling `ApifyStorageClient.from_config(config)`.
diff --git a/pyproject.toml b/pyproject.toml
index 68bb5847..5d6a44f7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -36,7 +36,8 @@ keywords = [
 dependencies = [
     "apify-client<2.0.0",
     "apify-shared<2.0.0",
-    "crawlee~=0.6.0",
+    "crawlee@git+https://github.com/apify/crawlee-python.git@master",
+    "cachetools>=5.5.0",
     "cryptography>=42.0.0",
     "httpx>=0.27.0",
     # TODO: ensure compatibility with the latest version of lazy-object-proxy
@@ -77,6 +78,7 @@ dev = [
     "pytest~=8.4.0",
     "ruff~=0.12.0",
     "setuptools", # setuptools are used by pytest but not explicitly required
+    "types-cachetools>=6.0.0.20250525",
     "uvicorn[standard]",
     "werkzeug~=3.1.3", # Werkzeug is used by httpserver
     "yarl~=1.20.0", # yarl is used by crawlee
@@ -85,6 +87,9 @@ dev = [
 [tool.hatch.build.targets.wheel]
 packages = ["src/apify"]
 
+[tool.hatch.metadata]
+allow-direct-references = true
+
 [tool.ruff]
 line-length = 120
 include = ["src/**/*.py", "tests/**/*.py", "docs/**/*.py", "website/**/*.py"]
diff --git a/src/apify/_actor.py b/src/apify/_actor.py
index 8f3c3c51..f2ec00ac 100644
--- a/src/apify/_actor.py
+++ b/src/apify/_actor.py
@@ -30,11 +30,11 @@
 from apify._consts import EVENT_LISTENERS_TIMEOUT
 from apify._crypto import decrypt_input_secrets, load_private_key
 from apify._models import ActorRun
-from apify._platform_event_manager import EventManager, LocalEventManager, PlatformEventManager
 from apify._proxy_configuration import ProxyConfiguration
 from apify._utils import docs_group, docs_name, get_system_info, is_running_in_ipython
-from apify.apify_storage_client import ApifyStorageClient
+from apify.events import ApifyEventManager, EventManager, LocalEventManager
 from apify.log import _configure_logging, logger
+from apify.storage_clients import ApifyStorageClient
 from apify.storages import Dataset, KeyValueStore, RequestQueue
 
 if TYPE_CHECKING:
@@ -126,11 +126,11 @@ def __init__(
 
         # Create an instance of the cloud storage client, the local storage client is obtained
         # from the service locator.
-        self._cloud_storage_client = ApifyStorageClient.from_config(config=self._configuration)
+        self._cloud_storage_client = ApifyStorageClient()
 
         # Set the event manager based on whether the Actor is running on the platform or locally.
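The `_actor.py` hunk above switches from `ApifyStorageClient.from_config(...)` to a no-argument constructor. A minimal sketch of the v3 pattern in user code, assuming a platform run with a valid token in the global configuration (the store name is illustrative); the same pattern reappears in the Scrapy scheduler and HTTP-cache hunks further below:

```python
import asyncio

from apify import Configuration
from apify.storage_clients import ApifyStorageClient
from apify.storages import KeyValueStore


async def main() -> None:
    configuration = Configuration.get_global_configuration()

    # v2.x: storage_client = ApifyStorageClient.from_config(configuration)
    # v3.x: the client takes no arguments; credentials and default storage
    # IDs are resolved from the configuration passed to `open` instead.
    storage_client = ApifyStorageClient()

    kvs = await KeyValueStore.open(
        name='my-cool-key-value-store',
        configuration=configuration,
        storage_client=storage_client,
    )
    await kvs.set_value('greeting', 'hello')


asyncio.run(main())
```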
self._event_manager = ( - PlatformEventManager( + ApifyEventManager( config=self._configuration, persist_state_interval=self._configuration.persist_state_interval, ) diff --git a/src/apify/_configuration.py b/src/apify/_configuration.py index d044f345..f7e4f028 100644 --- a/src/apify/_configuration.py +++ b/src/apify/_configuration.py @@ -140,6 +140,39 @@ class Configuration(CrawleeConfiguration): ), ] = None + default_dataset_id: Annotated[ + str, + Field( + validation_alias=AliasChoices( + 'actor_default_dataset_id', + 'apify_default_dataset_id', + ), + description='Default dataset ID used by the Apify storage client when no ID or name is provided.', + ), + ] = 'default' + + default_key_value_store_id: Annotated[ + str, + Field( + validation_alias=AliasChoices( + 'actor_default_key_value_store_id', + 'apify_default_key_value_store_id', + ), + description='Default key-value store ID for the Apify storage client when no ID or name is provided.', + ), + ] = 'default' + + default_request_queue_id: Annotated[ + str, + Field( + validation_alias=AliasChoices( + 'actor_default_request_queue_id', + 'apify_default_request_queue_id', + ), + description='Default request queue ID for the Apify storage client when no ID or name is provided.', + ), + ] = 'default' + disable_outdated_warning: Annotated[ bool, Field( diff --git a/src/apify/_proxy_configuration.py b/src/apify/_proxy_configuration.py index 37ec01ca..730c76ab 100644 --- a/src/apify/_proxy_configuration.py +++ b/src/apify/_proxy_configuration.py @@ -20,7 +20,8 @@ if TYPE_CHECKING: from apify_client import ApifyClientAsync - from crawlee import Request + + from apify import Request APIFY_PROXY_VALUE_REGEX = re.compile(r'^[\w._~]+$') COUNTRY_CODE_REGEX = re.compile(r'^[A-Z]{2}$') diff --git a/src/apify/apify_storage_client/__init__.py b/src/apify/apify_storage_client/__init__.py deleted file mode 100644 index 8b6d517c..00000000 --- a/src/apify/apify_storage_client/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from apify.apify_storage_client._apify_storage_client import ApifyStorageClient - -__all__ = ['ApifyStorageClient'] diff --git a/src/apify/apify_storage_client/_apify_storage_client.py b/src/apify/apify_storage_client/_apify_storage_client.py deleted file mode 100644 index 0a544d58..00000000 --- a/src/apify/apify_storage_client/_apify_storage_client.py +++ /dev/null @@ -1,72 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -from typing_extensions import override - -from apify_client import ApifyClientAsync -from crawlee._utils.crypto import crypto_random_object_id -from crawlee.storage_clients import StorageClient - -from apify._utils import docs_group -from apify.apify_storage_client._dataset_client import DatasetClient -from apify.apify_storage_client._dataset_collection_client import DatasetCollectionClient -from apify.apify_storage_client._key_value_store_client import KeyValueStoreClient -from apify.apify_storage_client._key_value_store_collection_client import KeyValueStoreCollectionClient -from apify.apify_storage_client._request_queue_client import RequestQueueClient -from apify.apify_storage_client._request_queue_collection_client import RequestQueueCollectionClient - -if TYPE_CHECKING: - from apify._configuration import Configuration - - -@docs_group('Storage clients') -class ApifyStorageClient(StorageClient): - """A storage client implementation based on the Apify platform storage.""" - - def __init__(self, *, configuration: Configuration) -> None: - self._client_key = crypto_random_object_id() - 
self._apify_client = ApifyClientAsync( - token=configuration.token, - api_url=configuration.api_base_url, - max_retries=8, - min_delay_between_retries_millis=500, - timeout_secs=360, - ) - self._configuration = configuration - - @classmethod - def from_config(cls, config: Configuration) -> ApifyStorageClient: - return cls(configuration=config) - - @override - def dataset(self, id: str) -> DatasetClient: - return DatasetClient(self._apify_client.dataset(id)) - - @override - def datasets(self) -> DatasetCollectionClient: - return DatasetCollectionClient(self._apify_client.datasets()) - - @override - def key_value_store(self, id: str) -> KeyValueStoreClient: - return KeyValueStoreClient(self._apify_client.key_value_store(id), self._configuration.api_public_base_url) - - @override - def key_value_stores(self) -> KeyValueStoreCollectionClient: - return KeyValueStoreCollectionClient(self._apify_client.key_value_stores()) - - @override - def request_queue(self, id: str) -> RequestQueueClient: - return RequestQueueClient(self._apify_client.request_queue(id, client_key=self._client_key)) - - @override - def request_queues(self) -> RequestQueueCollectionClient: - return RequestQueueCollectionClient(self._apify_client.request_queues()) - - @override - async def purge_on_start(self) -> None: - pass - - @override - def get_rate_limit_errors(self) -> dict[int, int]: - return self._apify_client.stats.rate_limit_errors diff --git a/src/apify/apify_storage_client/_dataset_client.py b/src/apify/apify_storage_client/_dataset_client.py deleted file mode 100644 index 93c8d575..00000000 --- a/src/apify/apify_storage_client/_dataset_client.py +++ /dev/null @@ -1,190 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -from typing_extensions import override - -from crawlee.storage_clients._base import DatasetClient as BaseDatasetClient -from crawlee.storage_clients.models import DatasetItemsListPage, DatasetMetadata - -if TYPE_CHECKING: - from collections.abc import AsyncIterator - from contextlib import AbstractAsyncContextManager - - from httpx import Response - - from apify_client.clients import DatasetClientAsync - from crawlee._types import JsonSerializable - - -class DatasetClient(BaseDatasetClient): - """Dataset resource client implementation based on the Apify platform storage.""" - - def __init__(self, apify_dataset_client: DatasetClientAsync) -> None: - self._client = apify_dataset_client - - @override - async def get(self) -> DatasetMetadata | None: - result = await self._client.get() - return DatasetMetadata.model_validate(result) if result else None - - @override - async def update( - self, - *, - name: str | None = None, - ) -> DatasetMetadata: - return DatasetMetadata.model_validate( - await self._client.update( - name=name, - ) - ) - - @override - async def delete(self) -> None: - await self._client.delete() - - @override - async def list_items( - self, - *, - offset: int | None = 0, - limit: int | None = BaseDatasetClient._LIST_ITEMS_LIMIT, # noqa: SLF001 - clean: bool = False, - desc: bool = False, - fields: list[str] | None = None, - omit: list[str] | None = None, - unwind: str | None = None, - skip_empty: bool = False, - skip_hidden: bool = False, - flatten: list[str] | None = None, - view: str | None = None, - ) -> DatasetItemsListPage: - return DatasetItemsListPage.model_validate( - vars( - await self._client.list_items( - offset=offset, - limit=limit, - clean=clean, - desc=desc, - fields=fields, - omit=omit, - unwind=unwind, - skip_empty=skip_empty, - 
skip_hidden=skip_hidden, - flatten=flatten, - view=view, - ) - ) - ) - - @override - async def iterate_items( - self, - *, - offset: int = 0, - limit: int | None = None, - clean: bool = False, - desc: bool = False, - fields: list[str] | None = None, - omit: list[str] | None = None, - unwind: str | None = None, - skip_empty: bool = False, - skip_hidden: bool = False, - ) -> AsyncIterator[dict]: - async for item in self._client.iterate_items( - offset=offset, - limit=limit, - clean=clean, - desc=desc, - fields=fields, - omit=omit, - unwind=unwind, - skip_empty=skip_empty, - skip_hidden=skip_hidden, - ): - yield item - - @override - async def get_items_as_bytes( - self, - *, - item_format: str = 'json', - offset: int | None = None, - limit: int | None = None, - desc: bool = False, - clean: bool = False, - bom: bool = False, - delimiter: str | None = None, - fields: list[str] | None = None, - omit: list[str] | None = None, - unwind: str | None = None, - skip_empty: bool = False, - skip_header_row: bool = False, - skip_hidden: bool = False, - xml_root: str | None = None, - xml_row: str | None = None, - flatten: list[str] | None = None, - ) -> bytes: - return await self._client.get_items_as_bytes( - item_format=item_format, - offset=offset, - limit=limit, - desc=desc, - clean=clean, - bom=bom, - delimiter=delimiter, - fields=fields, - omit=omit, - unwind=unwind, - skip_empty=skip_empty, - skip_header_row=skip_header_row, - skip_hidden=skip_hidden, - xml_root=xml_root, - xml_row=xml_row, - flatten=flatten, - ) - - @override - async def stream_items( - self, - *, - item_format: str = 'json', - offset: int | None = None, - limit: int | None = None, - desc: bool = False, - clean: bool = False, - bom: bool = False, - delimiter: str | None = None, - fields: list[str] | None = None, - omit: list[str] | None = None, - unwind: str | None = None, - skip_empty: bool = False, - skip_header_row: bool = False, - skip_hidden: bool = False, - xml_root: str | None = None, - xml_row: str | None = None, - ) -> AbstractAsyncContextManager[Response | None]: - return self._client.stream_items( - item_format=item_format, - offset=offset, - limit=limit, - desc=desc, - clean=clean, - bom=bom, - delimiter=delimiter, - fields=fields, - omit=omit, - unwind=unwind, - skip_empty=skip_empty, - skip_header_row=skip_header_row, - skip_hidden=skip_hidden, - xml_root=xml_root, - xml_row=xml_row, - ) - - @override - async def push_items(self, items: JsonSerializable) -> None: - await self._client.push_items( - items=items, - ) diff --git a/src/apify/apify_storage_client/_dataset_collection_client.py b/src/apify/apify_storage_client/_dataset_collection_client.py deleted file mode 100644 index f8ffc3e8..00000000 --- a/src/apify/apify_storage_client/_dataset_collection_client.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -from typing_extensions import override - -from crawlee.storage_clients._base import DatasetCollectionClient as BaseDatasetCollectionClient -from crawlee.storage_clients.models import DatasetListPage, DatasetMetadata - -if TYPE_CHECKING: - from apify_client.clients import DatasetCollectionClientAsync - - -class DatasetCollectionClient(BaseDatasetCollectionClient): - """Dataset collection resource client implementation based on the Apify platform storage.""" - - def __init__(self, apify_dataset_collection_client: DatasetCollectionClientAsync) -> None: - self._client = apify_dataset_collection_client - - @override - async def get_or_create( - self, - *, - 
id: str | None = None, - name: str | None = None, - schema: dict | None = None, - ) -> DatasetMetadata: - return DatasetMetadata.model_validate( - await self._client.get_or_create( - name=id if id is not None else name, - schema=schema, - ) - ) - - @override - async def list( - self, - *, - unnamed: bool = False, - limit: int | None = None, - offset: int | None = None, - desc: bool = False, - ) -> DatasetListPage: - return DatasetListPage.model_validate( - await self._client.list( - unnamed=unnamed, - limit=limit, - offset=offset, - desc=desc, - ) - ) diff --git a/src/apify/apify_storage_client/_key_value_store_client.py b/src/apify/apify_storage_client/_key_value_store_client.py deleted file mode 100644 index 49883b3f..00000000 --- a/src/apify/apify_storage_client/_key_value_store_client.py +++ /dev/null @@ -1,109 +0,0 @@ -from __future__ import annotations - -from contextlib import asynccontextmanager -from typing import TYPE_CHECKING, Any - -from typing_extensions import override -from yarl import URL - -from crawlee.storage_clients._base import KeyValueStoreClient as BaseKeyValueStoreClient -from crawlee.storage_clients.models import KeyValueStoreListKeysPage, KeyValueStoreMetadata, KeyValueStoreRecord - -from apify._crypto import create_hmac_signature - -if TYPE_CHECKING: - from collections.abc import AsyncIterator - from contextlib import AbstractAsyncContextManager - - from httpx import Response - - from apify_client.clients import KeyValueStoreClientAsync - - -class KeyValueStoreClient(BaseKeyValueStoreClient): - """Key-value store resource client implementation based on the Apify platform storage.""" - - def __init__(self, apify_key_value_store_client: KeyValueStoreClientAsync, api_public_base_url: str) -> None: - self._client = apify_key_value_store_client - self._api_public_base_url = api_public_base_url - - @override - async def get(self) -> KeyValueStoreMetadata | None: - result = await self._client.get() - return KeyValueStoreMetadata.model_validate(result) if result else None - - @override - async def update( - self, - *, - name: str | None = None, - ) -> KeyValueStoreMetadata: - return KeyValueStoreMetadata.model_validate(await self._client.update()) - - @override - async def delete(self) -> None: - await self._client.delete() - - @override - async def list_keys( - self, - *, - limit: int = 1000, - exclusive_start_key: str | None = None, - ) -> KeyValueStoreListKeysPage: - return KeyValueStoreListKeysPage.model_validate(await self._client.list_keys()) - - @override - async def get_record(self, key: str) -> KeyValueStoreRecord | None: - result = await self._client.get_record(key) - return KeyValueStoreRecord.model_validate(result) if result else None - - @override - async def get_record_as_bytes(self, key: str) -> KeyValueStoreRecord | None: - result = await self._client.get_record_as_bytes(key) - return KeyValueStoreRecord.model_validate(result) if result else None - - @override - async def stream_record(self, key: str) -> AbstractAsyncContextManager[KeyValueStoreRecord[Response] | None]: - return self._stream_record_internal(key) - - @asynccontextmanager - async def _stream_record_internal(self, key: str) -> AsyncIterator[KeyValueStoreRecord[Response] | None]: - async with self._client.stream_record(key) as response: - yield KeyValueStoreRecord.model_validate(response) - - @override - async def set_record(self, key: str, value: Any, content_type: str | None = None) -> None: - await self._client.set_record( - key=key, - value=value, - content_type=content_type, - ) - - 
@override - async def delete_record(self, key: str) -> None: - await self._client.delete_record( - key=key, - ) - - async def get_public_url(self, key: str) -> str: - """Get a URL for the given key that may be used to publicly access the value in the remote key-value store. - - Args: - key: The key for which the URL should be generated. - """ - if self._client.resource_id is None: - raise ValueError('resource_id cannot be None when generating a public URL') - - public_url = ( - URL(self._api_public_base_url) / 'v2' / 'key-value-stores' / self._client.resource_id / 'records' / key - ) - - key_value_store = await self.get() - - if key_value_store is not None and isinstance(key_value_store.model_extra, dict): - url_signing_secret_key = key_value_store.model_extra.get('urlSigningSecretKey') - if url_signing_secret_key: - public_url = public_url.with_query(signature=create_hmac_signature(url_signing_secret_key, key)) - - return str(public_url) diff --git a/src/apify/apify_storage_client/_key_value_store_collection_client.py b/src/apify/apify_storage_client/_key_value_store_collection_client.py deleted file mode 100644 index 0d4caca7..00000000 --- a/src/apify/apify_storage_client/_key_value_store_collection_client.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -from typing_extensions import override - -from crawlee.storage_clients._base import KeyValueStoreCollectionClient as BaseKeyValueStoreCollectionClient -from crawlee.storage_clients.models import KeyValueStoreListPage, KeyValueStoreMetadata - -if TYPE_CHECKING: - from apify_client.clients import KeyValueStoreCollectionClientAsync - - -class KeyValueStoreCollectionClient(BaseKeyValueStoreCollectionClient): - """Key-value store collection resource client implementation based on the Apify platform storage.""" - - def __init__(self, apify_dataset_collection_client: KeyValueStoreCollectionClientAsync) -> None: - self._client = apify_dataset_collection_client - - @override - async def get_or_create( - self, - *, - id: str | None = None, - name: str | None = None, - schema: dict | None = None, - ) -> KeyValueStoreMetadata: - return KeyValueStoreMetadata.model_validate( - await self._client.get_or_create( - name=id if id is not None else name, - schema=schema, - ) - ) - - @override - async def list( - self, - *, - unnamed: bool = False, - limit: int | None = None, - offset: int | None = None, - desc: bool = False, - ) -> KeyValueStoreListPage: - return KeyValueStoreListPage.model_validate( - await self._client.list( - unnamed=unnamed, - limit=limit, - offset=offset, - desc=desc, - ) - ) diff --git a/src/apify/apify_storage_client/_request_queue_client.py b/src/apify/apify_storage_client/_request_queue_client.py deleted file mode 100644 index 036eb2ab..00000000 --- a/src/apify/apify_storage_client/_request_queue_client.py +++ /dev/null @@ -1,176 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -from typing_extensions import override - -from crawlee import Request -from crawlee.storage_clients._base import RequestQueueClient as BaseRequestQueueClient -from crawlee.storage_clients.models import ( - BatchRequestsOperationResponse, - ProcessedRequest, - ProlongRequestLockResponse, - RequestQueueHead, - RequestQueueHeadWithLocks, - RequestQueueMetadata, -) - -if TYPE_CHECKING: - from collections.abc import Sequence - - from apify_client.clients import RequestQueueClientAsync - - -class RequestQueueClient(BaseRequestQueueClient): - """Request queue resource 
client implementation based on the Apify platform storage.""" - - def __init__(self, apify_request_queue_client: RequestQueueClientAsync) -> None: - self._client = apify_request_queue_client - - @override - async def get(self) -> RequestQueueMetadata | None: - result = await self._client.get() - return RequestQueueMetadata.model_validate({'resourceDirectory': ''} | result) if result else None - - @override - async def update( - self, - *, - name: str | None = None, - ) -> RequestQueueMetadata: - return RequestQueueMetadata.model_validate( - {'resourceDirectory': ''} - | await self._client.update( - name=name, - ) - ) - - @override - async def delete(self) -> None: - await self._client.delete() - - @override - async def list_head(self, *, limit: int | None = None) -> RequestQueueHead: - return RequestQueueHead.model_validate( - await self._client.list_head( - limit=limit, - ), - ) - - @override - async def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> RequestQueueHeadWithLocks: - return RequestQueueHeadWithLocks.model_validate( - await self._client.list_and_lock_head( - lock_secs=lock_secs, - limit=limit, - ) - ) - - @override - async def add_request( - self, - request: Request, - *, - forefront: bool = False, - ) -> ProcessedRequest: - return ProcessedRequest.model_validate( - {'id': request.id, 'uniqueKey': request.unique_key} - | await self._client.add_request( - request=request.model_dump( - by_alias=True, - exclude={ - 'id', - }, - ), - forefront=forefront, - ) - ) - - @override - async def get_request(self, request_id: str) -> Request | None: - result = await self._client.get_request(request_id) - return Request.model_validate(result) if result else None - - @override - async def update_request( - self, - request: Request, - *, - forefront: bool = False, - ) -> ProcessedRequest: - return ProcessedRequest.model_validate( - {'id': request.id, 'uniqueKey': request.unique_key} - | await self._client.update_request( - request=request.model_dump( - by_alias=True, - ), - forefront=forefront, - ) - ) - - @override - async def delete_request(self, request_id: str) -> None: - await self._client.delete_request(request_id) - - @override - async def prolong_request_lock( - self, - request_id: str, - *, - forefront: bool = False, - lock_secs: int, - ) -> ProlongRequestLockResponse: - return ProlongRequestLockResponse.model_validate( - await self._client.prolong_request_lock( - request_id=request_id, - forefront=forefront, - lock_secs=lock_secs, - ) - ) - - @override - async def delete_request_lock( - self, - request_id: str, - *, - forefront: bool = False, - ) -> None: - await self._client.delete_request_lock( - request_id=request_id, - forefront=forefront, - ) - - @override - async def batch_add_requests( - self, - requests: Sequence[Request], - *, - forefront: bool = False, - ) -> BatchRequestsOperationResponse: - return BatchRequestsOperationResponse.model_validate( - await self._client.batch_add_requests( - requests=[ - r.model_dump( - by_alias=True, - exclude={ - 'id', - }, - ) - for r in requests - ], - forefront=forefront, - ) - ) - - @override - async def batch_delete_requests(self, requests: list[Request]) -> BatchRequestsOperationResponse: - return BatchRequestsOperationResponse.model_validate( - await self._client.batch_delete_requests( - requests=[ - r.model_dump( - by_alias=True, - ) - for r in requests - ], - ) - ) diff --git a/src/apify/apify_storage_client/_request_queue_collection_client.py 
b/src/apify/apify_storage_client/_request_queue_collection_client.py deleted file mode 100644 index 5bf28836..00000000 --- a/src/apify/apify_storage_client/_request_queue_collection_client.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -from typing_extensions import override - -from crawlee.storage_clients._base import RequestQueueCollectionClient as BaseRequestQueueCollectionClient -from crawlee.storage_clients.models import RequestQueueListPage, RequestQueueMetadata - -if TYPE_CHECKING: - from apify_client.clients import RequestQueueCollectionClientAsync - - -class RequestQueueCollectionClient(BaseRequestQueueCollectionClient): - """Request queue collection resource client implementation based on the Apify platform storage.""" - - def __init__(self, apify_request_queue_collection_client: RequestQueueCollectionClientAsync) -> None: - self._client = apify_request_queue_collection_client - - @override - async def get_or_create( - self, - *, - id: str | None = None, - name: str | None = None, - schema: dict | None = None, - ) -> RequestQueueMetadata: - return RequestQueueMetadata.model_validate( - {'resourceDirectory': ''} - | await self._client.get_or_create( - name=id if id is not None else name, - ) - ) - - @override - async def list( - self, - *, - unnamed: bool = False, - limit: int | None = None, - offset: int | None = None, - desc: bool = False, - ) -> RequestQueueListPage: - return RequestQueueListPage.model_validate( - await self._client.list( - unnamed=unnamed, - limit=limit, - offset=offset, - desc=desc, - ) - ) diff --git a/src/apify/events/__init__.py b/src/apify/events/__init__.py new file mode 100644 index 00000000..c50c4ab8 --- /dev/null +++ b/src/apify/events/__init__.py @@ -0,0 +1,5 @@ +from crawlee.events import EventManager, LocalEventManager + +from ._apify_event_manager import ApifyEventManager + +__all__ = ['ApifyEventManager', 'EventManager', 'LocalEventManager'] diff --git a/src/apify/_platform_event_manager.py b/src/apify/events/_apify_event_manager.py similarity index 58% rename from src/apify/_platform_event_manager.py rename to src/apify/events/_apify_event_manager.py index 41d9379e..5b6e6f55 100644 --- a/src/apify/_platform_event_manager.py +++ b/src/apify/events/_apify_event_manager.py @@ -1,118 +1,26 @@ from __future__ import annotations import asyncio -from datetime import datetime -from typing import TYPE_CHECKING, Annotated, Any, Literal +from typing import TYPE_CHECKING, Annotated import websockets.asyncio.client -from pydantic import BaseModel, Discriminator, Field, TypeAdapter +from pydantic import Discriminator, TypeAdapter from typing_extensions import Self, Unpack, override -from crawlee.events._event_manager import EventManager, EventManagerOptions -from crawlee.events._local_event_manager import LocalEventManager -from crawlee.events._types import ( - Event, - EventAbortingData, - EventExitData, - EventMigratingData, - EventPersistStateData, - EventSystemInfoData, -) +from crawlee.events import EventManager +from crawlee.events._types import Event, EventPersistStateData from apify._utils import docs_group +from apify.events._types import DeprecatedEvent, EventMessage, SystemInfoEventData, UnknownEvent from apify.log import logger if TYPE_CHECKING: from types import TracebackType - from apify._configuration import Configuration - -__all__ = ['EventManager', 'LocalEventManager', 'PlatformEventManager'] - - -@docs_group('Event data') -class SystemInfoEventData(BaseModel): - mem_avg_bytes: 
Annotated[float, Field(alias='memAvgBytes')] - mem_current_bytes: Annotated[float, Field(alias='memCurrentBytes')] - mem_max_bytes: Annotated[float, Field(alias='memMaxBytes')] - cpu_avg_usage: Annotated[float, Field(alias='cpuAvgUsage')] - cpu_max_usage: Annotated[float, Field(alias='cpuMaxUsage')] - cpu_current_usage: Annotated[float, Field(alias='cpuCurrentUsage')] - is_cpu_overloaded: Annotated[bool, Field(alias='isCpuOverloaded')] - created_at: Annotated[datetime, Field(alias='createdAt')] - - def to_crawlee_format(self, dedicated_cpus: float) -> EventSystemInfoData: - return EventSystemInfoData.model_validate( - { - 'cpu_info': { - 'used_ratio': (self.cpu_current_usage / 100) / dedicated_cpus, - 'created_at': self.created_at, - }, - 'memory_info': { - 'total_size': self.mem_max_bytes, - 'current_size': self.mem_current_bytes, - 'created_at': self.created_at, - }, - } - ) - - -@docs_group('Events') -class PersistStateEvent(BaseModel): - name: Literal[Event.PERSIST_STATE] - data: Annotated[EventPersistStateData, Field(default_factory=lambda: EventPersistStateData(is_migrating=False))] - - -@docs_group('Events') -class SystemInfoEvent(BaseModel): - name: Literal[Event.SYSTEM_INFO] - data: SystemInfoEventData - - -@docs_group('Events') -class MigratingEvent(BaseModel): - name: Literal[Event.MIGRATING] - data: Annotated[EventMigratingData, Field(default_factory=EventMigratingData)] - - -@docs_group('Events') -class AbortingEvent(BaseModel): - name: Literal[Event.ABORTING] - data: Annotated[EventAbortingData, Field(default_factory=EventAbortingData)] - - -@docs_group('Events') -class ExitEvent(BaseModel): - name: Literal[Event.EXIT] - data: Annotated[EventExitData, Field(default_factory=EventExitData)] - - -@docs_group('Events') -class EventWithoutData(BaseModel): - name: Literal[ - Event.SESSION_RETIRED, - Event.BROWSER_LAUNCHED, - Event.BROWSER_RETIRED, - Event.BROWSER_CLOSED, - Event.PAGE_CREATED, - Event.PAGE_CLOSED, - ] - data: Any = None - - -@docs_group('Events') -class DeprecatedEvent(BaseModel): - name: Literal['cpuInfo'] - data: Annotated[dict[str, Any], Field(default_factory=dict)] - - -@docs_group('Events') -class UnknownEvent(BaseModel): - name: str - data: Annotated[dict[str, Any], Field(default_factory=dict)] + from crawlee.events._event_manager import EventManagerOptions + from apify._configuration import Configuration -EventMessage = PersistStateEvent | SystemInfoEvent | MigratingEvent | AbortingEvent | ExitEvent | EventWithoutData event_data_adapter = TypeAdapter[EventMessage | DeprecatedEvent | UnknownEvent]( Annotated[EventMessage, Discriminator('name')] | DeprecatedEvent | UnknownEvent @@ -120,7 +28,7 @@ class UnknownEvent(BaseModel): @docs_group('Event managers') -class PlatformEventManager(EventManager): +class ApifyEventManager(EventManager): """A class for managing Actor events. 
You shouldn't use this class directly, diff --git a/src/apify/events/_types.py b/src/apify/events/_types.py new file mode 100644 index 00000000..f6ff3ee6 --- /dev/null +++ b/src/apify/events/_types.py @@ -0,0 +1,102 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Annotated, Any, Literal + +from pydantic import BaseModel, Field + +from crawlee.events._types import ( + Event, + EventAbortingData, + EventExitData, + EventMigratingData, + EventPersistStateData, + EventSystemInfoData, +) + +from apify._utils import docs_group + + +@docs_group('Event data') +class SystemInfoEventData(BaseModel): + mem_avg_bytes: Annotated[float, Field(alias='memAvgBytes')] + mem_current_bytes: Annotated[float, Field(alias='memCurrentBytes')] + mem_max_bytes: Annotated[float, Field(alias='memMaxBytes')] + cpu_avg_usage: Annotated[float, Field(alias='cpuAvgUsage')] + cpu_max_usage: Annotated[float, Field(alias='cpuMaxUsage')] + cpu_current_usage: Annotated[float, Field(alias='cpuCurrentUsage')] + is_cpu_overloaded: Annotated[bool, Field(alias='isCpuOverloaded')] + created_at: Annotated[datetime, Field(alias='createdAt')] + + def to_crawlee_format(self, dedicated_cpus: float) -> EventSystemInfoData: + return EventSystemInfoData.model_validate( + { + 'cpu_info': { + 'used_ratio': (self.cpu_current_usage / 100) / dedicated_cpus, + 'created_at': self.created_at, + }, + 'memory_info': { + 'total_size': self.mem_max_bytes, + 'current_size': self.mem_current_bytes, + 'created_at': self.created_at, + }, + } + ) + + +@docs_group('Events') +class PersistStateEvent(BaseModel): + name: Literal[Event.PERSIST_STATE] + data: Annotated[EventPersistStateData, Field(default_factory=lambda: EventPersistStateData(is_migrating=False))] + + +@docs_group('Events') +class SystemInfoEvent(BaseModel): + name: Literal[Event.SYSTEM_INFO] + data: SystemInfoEventData + + +@docs_group('Events') +class MigratingEvent(BaseModel): + name: Literal[Event.MIGRATING] + data: Annotated[EventMigratingData, Field(default_factory=EventMigratingData)] + + +@docs_group('Events') +class AbortingEvent(BaseModel): + name: Literal[Event.ABORTING] + data: Annotated[EventAbortingData, Field(default_factory=EventAbortingData)] + + +@docs_group('Events') +class ExitEvent(BaseModel): + name: Literal[Event.EXIT] + data: Annotated[EventExitData, Field(default_factory=EventExitData)] + + +@docs_group('Events') +class EventWithoutData(BaseModel): + name: Literal[ + Event.SESSION_RETIRED, + Event.BROWSER_LAUNCHED, + Event.BROWSER_RETIRED, + Event.BROWSER_CLOSED, + Event.PAGE_CREATED, + Event.PAGE_CLOSED, + ] + data: Any = None + + +@docs_group('Events') +class DeprecatedEvent(BaseModel): + name: Literal['cpuInfo'] + data: Annotated[dict[str, Any], Field(default_factory=dict)] + + +@docs_group('Events') +class UnknownEvent(BaseModel): + name: str + data: Annotated[dict[str, Any], Field(default_factory=dict)] + + +EventMessage = PersistStateEvent | SystemInfoEvent | MigratingEvent | AbortingEvent | ExitEvent | EventWithoutData diff --git a/src/apify/apify_storage_client/py.typed b/src/apify/events/py.typed similarity index 100% rename from src/apify/apify_storage_client/py.typed rename to src/apify/events/py.typed diff --git a/src/apify/request_loaders/__init__.py b/src/apify/request_loaders/__init__.py new file mode 100644 index 00000000..faf48e1d --- /dev/null +++ b/src/apify/request_loaders/__init__.py @@ -0,0 +1,18 @@ +from crawlee.request_loaders import ( + RequestList, + RequestLoader, + RequestManager, + 
RequestManagerTandem, + SitemapRequestLoader, +) + +from ._apify_request_list import ApifyRequestList + +__all__ = [ + 'ApifyRequestList', + 'RequestList', + 'RequestLoader', + 'RequestManager', + 'RequestManagerTandem', + 'SitemapRequestLoader', +] diff --git a/src/apify/storages/_request_list.py b/src/apify/request_loaders/_apify_request_list.py similarity index 80% rename from src/apify/storages/_request_list.py rename to src/apify/request_loaders/_apify_request_list.py index 28994041..272defed 100644 --- a/src/apify/storages/_request_list.py +++ b/src/apify/request_loaders/_apify_request_list.py @@ -3,16 +3,15 @@ import asyncio import re from asyncio import Task -from functools import partial from typing import Annotated, Any from pydantic import BaseModel, Field, TypeAdapter -from crawlee import Request from crawlee._types import HttpMethod -from crawlee.http_clients import HttpClient, HttpxHttpClient -from crawlee.request_loaders import RequestList as CrawleeRequestList +from crawlee.http_clients import HttpClient, ImpitHttpClient +from crawlee.request_loaders import RequestList +from apify import Request from apify._utils import docs_group URL_NO_COMMAS_REGEX = re.compile( @@ -39,7 +38,7 @@ class _SimpleUrlInput(_RequestDetails): @docs_group('Request loaders') -class RequestList(CrawleeRequestList): +class ApifyRequestList(RequestList): """Extends crawlee RequestList. Method open is used to create RequestList from actor's requestListSources input. @@ -50,7 +49,7 @@ async def open( name: str | None = None, request_list_sources_input: list[dict[str, Any]] | None = None, http_client: HttpClient | None = None, - ) -> RequestList: + ) -> ApifyRequestList: """Initialize a new instance from request list source input. Args: @@ -74,24 +73,26 @@ async def open( ``` """ request_list_sources_input = request_list_sources_input or [] - return await RequestList._create_request_list(name, request_list_sources_input, http_client) + return await ApifyRequestList._create_request_list(name, request_list_sources_input, http_client) @staticmethod async def _create_request_list( name: str | None, request_list_sources_input: list[dict[str, Any]], http_client: HttpClient | None - ) -> RequestList: + ) -> ApifyRequestList: if not http_client: - http_client = HttpxHttpClient() + http_client = ImpitHttpClient() url_inputs = url_input_adapter.validate_python(request_list_sources_input) simple_url_inputs = [url_input for url_input in url_inputs if isinstance(url_input, _SimpleUrlInput)] remote_url_inputs = [url_input for url_input in url_inputs if isinstance(url_input, _RequestsFromUrlInput)] - simple_url_requests = RequestList._create_requests_from_input(simple_url_inputs) - remote_url_requests = await RequestList._fetch_requests_from_url(remote_url_inputs, http_client=http_client) + simple_url_requests = ApifyRequestList._create_requests_from_input(simple_url_inputs) + remote_url_requests = await ApifyRequestList._fetch_requests_from_url( + remote_url_inputs, http_client=http_client + ) - return RequestList(name=name, requests=simple_url_requests + remote_url_requests) + return ApifyRequestList(name=name, requests=simple_url_requests + remote_url_requests) @staticmethod def _create_requests_from_input(simple_url_inputs: list[_SimpleUrlInput]) -> list[Request]: @@ -119,13 +120,15 @@ async def _fetch_requests_from_url( """ created_requests: list[Request] = [] - def create_requests_from_response(request_input: _RequestsFromUrlInput, task: Task) -> None: + async def create_requests_from_response(request_input: 
_RequestsFromUrlInput, task: Task) -> None: """Extract links from response body and use them to create `Request` objects. Use the regular expression to find all matching links in the response body, then create `Request` objects from these links and the provided input attributes. """ - matches = re.finditer(URL_NO_COMMAS_REGEX, task.result().read().decode('utf-8')) + response = await (task.result()).read() + matches = re.finditer(URL_NO_COMMAS_REGEX, response.decode('utf-8')) + created_requests.extend( [ Request.from_url( @@ -148,7 +151,11 @@ def create_requests_from_response(request_input: _RequestsFromUrlInput, task: Ta ) ) - get_response_task.add_done_callback(partial(create_requests_from_response, remote_url_requests_input)) + get_response_task.add_done_callback( + lambda task, inp=remote_url_requests_input: asyncio.create_task( # type: ignore[misc] + create_requests_from_response(inp, task) + ) + ) remote_url_requests.append(get_response_task) await asyncio.gather(*remote_url_requests) diff --git a/src/apify/request_loaders/py.typed b/src/apify/request_loaders/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/src/apify/scrapy/extensions/_httpcache.py b/src/apify/scrapy/extensions/_httpcache.py index 509c4d8a..14d8753d 100644 --- a/src/apify/scrapy/extensions/_httpcache.py +++ b/src/apify/scrapy/extensions/_httpcache.py @@ -13,8 +13,8 @@ from scrapy.responsetypes import responsetypes from apify import Configuration -from apify.apify_storage_client import ApifyStorageClient from apify.scrapy._async_thread import AsyncThread +from apify.storage_clients import ApifyStorageClient from apify.storages import KeyValueStore if TYPE_CHECKING: @@ -51,10 +51,14 @@ def open_spider(self, spider: Spider) -> None: kvs_name = get_kvs_name(spider.name) async def open_kvs() -> KeyValueStore: - config = Configuration.get_global_configuration() - if config.is_at_home: - storage_client = ApifyStorageClient.from_config(config) - return await KeyValueStore.open(name=kvs_name, storage_client=storage_client) + configuration = Configuration.get_global_configuration() + if configuration.is_at_home: + storage_client = ApifyStorageClient() + return await KeyValueStore.open( + name=kvs_name, + configuration=configuration, + storage_client=storage_client, + ) return await KeyValueStore.open(name=kvs_name) logger.debug("Starting background thread for cache storage's event loop") diff --git a/src/apify/scrapy/requests.py b/src/apify/scrapy/requests.py index a262b920..63bba3c7 100644 --- a/src/apify/scrapy/requests.py +++ b/src/apify/scrapy/requests.py @@ -10,9 +10,10 @@ from scrapy.http.headers import Headers from scrapy.utils.request import request_from_dict -from crawlee import Request as ApifyRequest from crawlee._types import HttpHeaders +from apify import Request as ApifyRequest + logger = getLogger(__name__) diff --git a/src/apify/scrapy/scheduler.py b/src/apify/scrapy/scheduler.py index a243a368..2dcacd9a 100644 --- a/src/apify/scrapy/scheduler.py +++ b/src/apify/scrapy/scheduler.py @@ -11,7 +11,7 @@ from ._async_thread import AsyncThread from .requests import to_apify_request, to_scrapy_request from apify import Configuration -from apify.apify_storage_client import ApifyStorageClient +from apify.storage_clients import ApifyStorageClient from apify.storages import RequestQueue if TYPE_CHECKING: @@ -49,10 +49,13 @@ def open(self, spider: Spider) -> Deferred[None] | None: self.spider = spider async def open_rq() -> RequestQueue: - config = Configuration.get_global_configuration() - if 
config.is_at_home: - storage_client = ApifyStorageClient.from_config(config) - return await RequestQueue.open(storage_client=storage_client) + configuration = Configuration.get_global_configuration() + if configuration.is_at_home: + storage_client = ApifyStorageClient() + return await RequestQueue.open( + configuration=configuration, + storage_client=storage_client, + ) return await RequestQueue.open() try: diff --git a/src/apify/storage_clients/__init__.py b/src/apify/storage_clients/__init__.py new file mode 100644 index 00000000..f3e5298c --- /dev/null +++ b/src/apify/storage_clients/__init__.py @@ -0,0 +1,10 @@ +from crawlee.storage_clients import MemoryStorageClient + +from ._apify import ApifyStorageClient +from ._file_system import ApifyFileSystemStorageClient as FileSystemStorageClient + +__all__ = [ + 'ApifyStorageClient', + 'FileSystemStorageClient', + 'MemoryStorageClient', +] diff --git a/src/apify/storage_clients/_apify/__init__.py b/src/apify/storage_clients/_apify/__init__.py new file mode 100644 index 00000000..4af7c8ee --- /dev/null +++ b/src/apify/storage_clients/_apify/__init__.py @@ -0,0 +1,11 @@ +from ._dataset_client import ApifyDatasetClient +from ._key_value_store_client import ApifyKeyValueStoreClient +from ._request_queue_client import ApifyRequestQueueClient +from ._storage_client import ApifyStorageClient + +__all__ = [ + 'ApifyDatasetClient', + 'ApifyKeyValueStoreClient', + 'ApifyRequestQueueClient', + 'ApifyStorageClient', +] diff --git a/src/apify/storage_clients/_apify/_dataset_client.py b/src/apify/storage_clients/_apify/_dataset_client.py new file mode 100644 index 00000000..385d6522 --- /dev/null +++ b/src/apify/storage_clients/_apify/_dataset_client.py @@ -0,0 +1,304 @@ +from __future__ import annotations + +import asyncio +from logging import getLogger +from typing import TYPE_CHECKING, Any + +from typing_extensions import override + +from apify_client import ApifyClientAsync +from crawlee._utils.byte_size import ByteSize +from crawlee._utils.file import json_dumps +from crawlee.storage_clients._base import DatasetClient +from crawlee.storage_clients.models import DatasetItemsListPage, DatasetMetadata + +if TYPE_CHECKING: + from collections.abc import AsyncIterator + + from apify_client.clients import DatasetClientAsync + from crawlee._types import JsonSerializable + + from apify import Configuration + +logger = getLogger(__name__) + + +class ApifyDatasetClient(DatasetClient): + """An Apify platform implementation of the dataset client.""" + + _MAX_PAYLOAD_SIZE = ByteSize.from_mb(9) + """Maximum size for a single payload.""" + + _SAFETY_BUFFER_COEFFICIENT = 0.01 / 100 # 0.01% + """Percentage buffer to reduce payload limit slightly for safety.""" + + _EFFECTIVE_LIMIT_SIZE = _MAX_PAYLOAD_SIZE - (_MAX_PAYLOAD_SIZE * _SAFETY_BUFFER_COEFFICIENT) + """Calculated payload limit considering safety buffer.""" + + def __init__( + self, + *, + api_client: DatasetClientAsync, + api_public_base_url: str, + lock: asyncio.Lock, + ) -> None: + """Initialize a new instance. + + Preferably use the `ApifyDatasetClient.open` class method to create a new instance. 
+ """ + self._api_client = api_client + """The Apify dataset client for API operations.""" + + self._api_public_base_url = api_public_base_url + """The public base URL for accessing the key-value store records.""" + + self._lock = lock + """A lock to ensure that only one operation is performed at a time.""" + + @override + async def get_metadata(self) -> DatasetMetadata: + metadata = await self._api_client.get() + return DatasetMetadata.model_validate(metadata) + + @classmethod + async def open( + cls, + *, + id: str | None, + name: str | None, + configuration: Configuration, + ) -> ApifyDatasetClient: + """Open an Apify dataset client. + + This method creates and initializes a new instance of the Apify dataset client. + It handles authentication, storage lookup/creation, and metadata retrieval. + + Args: + id: The ID of an existing dataset to open. If provided, the client will connect to this specific storage. + Cannot be used together with `name`. + name: The name of a dataset to get or create. If a storage with this name exists, it will be opened; + otherwise, a new one will be created. Cannot be used together with `id`. + configuration: The configuration object containing API credentials and settings. Must include a valid + `token` and `api_base_url`. May also contain a `default_dataset_id` for fallback when neither + `id` nor `name` is provided. + + Returns: + An instance for the opened or created storage client. + + Raises: + ValueError: If the configuration is missing required fields (token, api_base_url), if both `id` and `name` + are provided, or if neither `id` nor `name` is provided and no default storage ID is available in + the configuration. + """ + token = configuration.token + if not token: + raise ValueError(f'Apify storage client requires a valid token in Configuration (token={token}).') + + api_url = configuration.api_base_url + if not api_url: + raise ValueError(f'Apify storage client requires a valid API URL in Configuration (api_url={api_url}).') + + api_public_base_url = configuration.api_public_base_url + if not api_public_base_url: + raise ValueError( + 'Apify storage client requires a valid API public base URL in Configuration ' + f'(api_public_base_url={api_public_base_url}).' + ) + + # Create Apify client with the provided token and API URL. + apify_client_async = ApifyClientAsync( + token=token, + api_url=api_url, + max_retries=8, + min_delay_between_retries_millis=500, + timeout_secs=360, + ) + apify_datasets_client = apify_client_async.datasets() + + # If both id and name are provided, raise an error. + if id and name: + raise ValueError('Only one of "id" or "name" can be specified, not both.') + + # If id is provided, get the storage by ID. + if id and name is None: + apify_dataset_client = apify_client_async.dataset(dataset_id=id) + + # If name is provided, get or create the storage by name. + if name and id is None: + id = DatasetMetadata.model_validate( + await apify_datasets_client.get_or_create(name=name), + ).id + apify_dataset_client = apify_client_async.dataset(dataset_id=id) + + # If both id and name are None, try to get the default storage ID from environment variables. + # The default storage ID environment variable is set by the Apify platform. It also contains + # a new storage ID after Actor's reboot or migration. + if id is None and name is None: + id = configuration.default_dataset_id + apify_dataset_client = apify_client_async.dataset(dataset_id=id) + + # Fetch its metadata. 
+        metadata = await apify_dataset_client.get()
+
+        # If metadata is None, it means the storage does not exist, so we create it.
+        if metadata is None:
+            id = DatasetMetadata.model_validate(
+                await apify_datasets_client.get_or_create(),
+            ).id
+            apify_dataset_client = apify_client_async.dataset(dataset_id=id)
+
+        # Verify that the storage exists by fetching its metadata again.
+        metadata = await apify_dataset_client.get()
+        if metadata is None:
+            raise ValueError(f'Opening dataset with id={id} and name={name} failed.')
+
+        return cls(
+            api_client=apify_dataset_client,
+            api_public_base_url=api_public_base_url,
+            lock=asyncio.Lock(),
+        )
+
+    @override
+    async def purge(self) -> None:
+        raise NotImplementedError(
+            'Purging datasets is not supported in the Apify platform. '
+            'Use the `drop` method to delete the dataset instead.'
+        )
+
+    @override
+    async def drop(self) -> None:
+        async with self._lock:
+            await self._api_client.delete()
+
+    @override
+    async def push_data(self, data: list[Any] | dict[str, Any]) -> None:
+        async def payloads_generator() -> AsyncIterator[str]:
+            for index, item in enumerate(data):
+                yield await self._check_and_serialize(item, index)
+
+        async with self._lock:
+            # Handle lists.
+            if isinstance(data, list):
+                # Invoke the client serially to preserve the order of the data.
+                async for items in self._chunk_by_size(payloads_generator()):
+                    await self._api_client.push_items(items=items)
+
+            # Handle singular items.
+            else:
+                items = await self._check_and_serialize(data)
+                await self._api_client.push_items(items=items)
+
+    @override
+    async def get_data(
+        self,
+        *,
+        offset: int = 0,
+        limit: int | None = 999_999_999_999,
+        clean: bool = False,
+        desc: bool = False,
+        fields: list[str] | None = None,
+        omit: list[str] | None = None,
+        unwind: str | None = None,
+        skip_empty: bool = False,
+        skip_hidden: bool = False,
+        flatten: list[str] | None = None,
+        view: str | None = None,
+    ) -> DatasetItemsListPage:
+        response = await self._api_client.list_items(
+            offset=offset,
+            limit=limit,
+            clean=clean,
+            desc=desc,
+            fields=fields,
+            omit=omit,
+            unwind=unwind,
+            skip_empty=skip_empty,
+            skip_hidden=skip_hidden,
+            flatten=flatten,
+            view=view,
+        )
+        return DatasetItemsListPage.model_validate(vars(response))
+
+    @override
+    async def iterate_items(
+        self,
+        *,
+        offset: int = 0,
+        limit: int | None = None,
+        clean: bool = False,
+        desc: bool = False,
+        fields: list[str] | None = None,
+        omit: list[str] | None = None,
+        unwind: str | None = None,
+        skip_empty: bool = False,
+        skip_hidden: bool = False,
+    ) -> AsyncIterator[dict]:
+        async for item in self._api_client.iterate_items(
+            offset=offset,
+            limit=limit,
+            clean=clean,
+            desc=desc,
+            fields=fields,
+            omit=omit,
+            unwind=unwind,
+            skip_empty=skip_empty,
+            skip_hidden=skip_hidden,
+        ):
+            yield item
+
+    @classmethod
+    async def _check_and_serialize(cls, item: JsonSerializable, index: int | None = None) -> str:
+        """Serialize a given item to JSON, check its serializability, and validate its size against the limit.
+
+        Args:
+            item: The item to serialize.
+            index: Index of the item, used for error context.
+
+        Returns:
+            Serialized JSON string.
+
+        Raises:
+            ValueError: If the item is not JSON serializable or exceeds the size limit.
+        """
+ """ + s = ' ' if index is None else f' at index {index} ' + + try: + payload = await json_dumps(item) + except Exception as exc: + raise ValueError(f'Data item{s}is not serializable to JSON.') from exc + + payload_size = ByteSize(len(payload.encode('utf-8'))) + if payload_size > cls._EFFECTIVE_LIMIT_SIZE: + raise ValueError(f'Data item{s}is too large (size: {payload_size}, limit: {cls._EFFECTIVE_LIMIT_SIZE})') + + return payload + + async def _chunk_by_size(self, items: AsyncIterator[str]) -> AsyncIterator[str]: + """Yield chunks of JSON arrays composed of input strings, respecting a size limit. + + Groups an iterable of JSON string payloads into larger JSON arrays, ensuring the total size + of each array does not exceed `EFFECTIVE_LIMIT_SIZE`. Each output is a JSON array string that + contains as many payloads as possible without breaching the size threshold, maintaining the + order of the original payloads. Assumes individual items are below the size limit. + + Args: + items: Iterable of JSON string payloads. + + Yields: + Strings representing JSON arrays of payloads, each staying within the size limit. + """ + last_chunk_size = ByteSize(2) # Add 2 bytes for [] wrapper. + current_chunk = [] + + async for payload in items: + payload_size = ByteSize(len(payload.encode('utf-8'))) + + if last_chunk_size + payload_size <= self._EFFECTIVE_LIMIT_SIZE: + current_chunk.append(payload) + last_chunk_size += payload_size + ByteSize(1) # Add 1 byte for ',' separator. + else: + yield f'[{",".join(current_chunk)}]' + current_chunk = [payload] + last_chunk_size = payload_size + ByteSize(2) # Add 2 bytes for [] wrapper. + + yield f'[{",".join(current_chunk)}]' diff --git a/src/apify/storage_clients/_apify/_key_value_store_client.py b/src/apify/storage_clients/_apify/_key_value_store_client.py new file mode 100644 index 00000000..fb841320 --- /dev/null +++ b/src/apify/storage_clients/_apify/_key_value_store_client.py @@ -0,0 +1,241 @@ +from __future__ import annotations + +import asyncio +from logging import getLogger +from typing import TYPE_CHECKING, Any + +from typing_extensions import override +from yarl import URL + +from apify_client import ApifyClientAsync +from crawlee.storage_clients._base import KeyValueStoreClient +from crawlee.storage_clients.models import KeyValueStoreRecord, KeyValueStoreRecordMetadata + +from ._models import ApifyKeyValueStoreMetadata, KeyValueStoreListKeysPage +from apify._crypto import create_hmac_signature + +if TYPE_CHECKING: + from collections.abc import AsyncIterator + + from apify_client.clients import KeyValueStoreClientAsync + + from apify import Configuration + +logger = getLogger(__name__) + + +class ApifyKeyValueStoreClient(KeyValueStoreClient): + """An Apify platform implementation of the key-value store client.""" + + def __init__( + self, + *, + api_client: KeyValueStoreClientAsync, + api_public_base_url: str, + lock: asyncio.Lock, + ) -> None: + """Initialize a new instance. + + Preferably use the `ApifyKeyValueStoreClient.open` class method to create a new instance. 
+ """ + self._api_client = api_client + """The Apify KVS client for API operations.""" + + self._api_public_base_url = api_public_base_url + """The public base URL for accessing the key-value store records.""" + + self._lock = lock + """A lock to ensure that only one operation is performed at a time.""" + + @override + async def get_metadata(self) -> ApifyKeyValueStoreMetadata: + metadata = await self._api_client.get() + return ApifyKeyValueStoreMetadata.model_validate(metadata) + + @classmethod + async def open( + cls, + *, + id: str | None, + name: str | None, + configuration: Configuration, + ) -> ApifyKeyValueStoreClient: + """Open an Apify key-value store client. + + This method creates and initializes a new instance of the Apify key-value store client. + It handles authentication, storage lookup/creation, and metadata retrieval. + + Args: + id: The ID of an existing key-value store to open. If provided, the client will connect to this specific + storage. Cannot be used together with `name`. + name: The name of a key-value store to get or create. If a storage with this name exists, it will be + opened; otherwise, a new one will be created. Cannot be used together with `id`. + configuration: The configuration object containing API credentials and settings. Must include a valid + `token` and `api_base_url`. May also contain a `default_key_value_store_id` for fallback when + neither `id` nor `name` is provided. + + Returns: + An instance for the opened or created storage client. + + Raises: + ValueError: If the configuration is missing required fields (token, api_base_url), if both `id` and `name` + are provided, or if neither `id` nor `name` is provided and no default storage ID is available + in the configuration. + """ + token = configuration.token + if not token: + raise ValueError(f'Apify storage client requires a valid token in Configuration (token={token}).') + + api_url = configuration.api_base_url + if not api_url: + raise ValueError(f'Apify storage client requires a valid API URL in Configuration (api_url={api_url}).') + + api_public_base_url = configuration.api_public_base_url + if not api_public_base_url: + raise ValueError( + 'Apify storage client requires a valid API public base URL in Configuration ' + f'(api_public_base_url={api_public_base_url}).' + ) + + # Create Apify client with the provided token and API URL. + apify_client_async = ApifyClientAsync( + token=token, + api_url=api_url, + max_retries=8, + min_delay_between_retries_millis=500, + timeout_secs=360, + ) + apify_kvss_client = apify_client_async.key_value_stores() + + # If both id and name are provided, raise an error. + if id and name: + raise ValueError('Only one of "id" or "name" can be specified, not both.') + + # If id is provided, get the storage by ID. + if id and name is None: + apify_kvs_client = apify_client_async.key_value_store(key_value_store_id=id) + + # If name is provided, get or create the storage by name. + if name and id is None: + id = ApifyKeyValueStoreMetadata.model_validate( + await apify_kvss_client.get_or_create(name=name), + ).id + apify_kvs_client = apify_client_async.key_value_store(key_value_store_id=id) + + # If both id and name are None, try to get the default storage ID from environment variables. + # The default storage ID environment variable is set by the Apify platform. It also contains + # a new storage ID after Actor's reboot or migration. 
+        if id is None and name is None:
+            id = configuration.default_key_value_store_id
+            apify_kvs_client = apify_client_async.key_value_store(key_value_store_id=id)
+
+        # Fetch its metadata.
+        metadata = await apify_kvs_client.get()
+
+        # If metadata is None, it means the storage does not exist, so we create it.
+        if metadata is None:
+            id = ApifyKeyValueStoreMetadata.model_validate(
+                await apify_kvss_client.get_or_create(),
+            ).id
+            apify_kvs_client = apify_client_async.key_value_store(key_value_store_id=id)
+
+        # Verify that the storage exists by fetching its metadata again.
+        metadata = await apify_kvs_client.get()
+        if metadata is None:
+            raise ValueError(f'Opening key-value store with id={id} and name={name} failed.')
+
+        return cls(
+            api_client=apify_kvs_client,
+            api_public_base_url=api_public_base_url,
+            lock=asyncio.Lock(),
+        )
+
+    @override
+    async def purge(self) -> None:
+        raise NotImplementedError(
+            'Purging key-value stores is not supported on the Apify platform. '
+            'Use the `drop` method to delete the key-value store instead.'
+        )
+
+    @override
+    async def drop(self) -> None:
+        async with self._lock:
+            await self._api_client.delete()
+
+    @override
+    async def get_value(self, key: str) -> KeyValueStoreRecord | None:
+        response = await self._api_client.get_record(key)
+        return KeyValueStoreRecord.model_validate(response) if response else None
+
+    @override
+    async def set_value(self, key: str, value: Any, content_type: str | None = None) -> None:
+        async with self._lock:
+            await self._api_client.set_record(
+                key=key,
+                value=value,
+                content_type=content_type,
+            )
+
+    @override
+    async def delete_value(self, key: str) -> None:
+        async with self._lock:
+            await self._api_client.delete_record(key=key)
+
+    @override
+    async def iterate_keys(
+        self,
+        *,
+        exclusive_start_key: str | None = None,
+        limit: int | None = None,
+    ) -> AsyncIterator[KeyValueStoreRecordMetadata]:
+        count = 0
+
+        while True:
+            response = await self._api_client.list_keys(exclusive_start_key=exclusive_start_key)
+            list_key_page = KeyValueStoreListKeysPage.model_validate(response)
+
+            for item in list_key_page.items:
+                # Convert KeyValueStoreKeyInfo to KeyValueStoreRecordMetadata
+                record_metadata = KeyValueStoreRecordMetadata(
+                    key=item.key,
+                    size=item.size,
+                    content_type='application/octet-stream',  # Content type not available from list_keys
+                )
+                yield record_metadata
+                count += 1
+
+                # If we've reached the limit, stop yielding
+                if limit and count >= limit:
+                    break
+
+            # If we've reached the limit or there are no more pages, exit the loop
+            if (limit and count >= limit) or not list_key_page.is_truncated:
+                break
+
+            exclusive_start_key = list_key_page.next_exclusive_start_key
+
+    @override
+    async def record_exists(self, key: str) -> bool:
+        return await self._api_client.record_exists(key=key)
+
+    async def get_public_url(self, key: str) -> str:
+        """Get a URL for the given key that may be used to publicly access the value in the remote key-value store.
+
+        Args:
+            key: The key for which the URL should be generated.
+
+        Returns:
+            A public URL that can be used to access the value of the given key in the KVS.
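+
+        Example:
+            An illustrative sketch of the resulting URL shape (the base URL, store ID and key are
+            hypothetical; the `signature` query parameter is only appended when the store has a
+            URL signing secret key):
+
+                https://api.apify.com/v2/key-value-stores/<store-id>/records/<key>?signature=<hmac>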
+ """ + if self._api_client.resource_id is None: + raise ValueError('resource_id cannot be None when generating a public URL') + + public_url = ( + URL(self._api_public_base_url) / 'v2' / 'key-value-stores' / self._api_client.resource_id / 'records' / key + ) + metadata = await self.get_metadata() + + if metadata.url_signing_secret_key is not None: + public_url = public_url.with_query(signature=create_hmac_signature(metadata.url_signing_secret_key, key)) + + return str(public_url) diff --git a/src/apify/storage_clients/_apify/_models.py b/src/apify/storage_clients/_apify/_models.py new file mode 100644 index 00000000..df981121 --- /dev/null +++ b/src/apify/storage_clients/_apify/_models.py @@ -0,0 +1,107 @@ +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Annotated + +from pydantic import BaseModel, ConfigDict, Field + +from crawlee.storage_clients.models import KeyValueStoreMetadata + +from apify import Request +from apify._utils import docs_group + + +@docs_group('Storage data') +class ApifyKeyValueStoreMetadata(KeyValueStoreMetadata): + """Extended key-value store metadata model for Apify platform. + + Includes additional Apify-specific fields. + """ + + url_signing_secret_key: Annotated[str | None, Field(alias='urlSigningSecretKey', default=None)] + """The secret key used for signing URLs for secure access to key-value store records.""" + + +@docs_group('Storage data') +class ProlongRequestLockResponse(BaseModel): + """Response to prolong request lock calls.""" + + model_config = ConfigDict(populate_by_name=True) + + lock_expires_at: Annotated[datetime, Field(alias='lockExpiresAt')] + + +@docs_group('Storage data') +class RequestQueueHead(BaseModel): + """Model for request queue head. + + Represents a collection of requests retrieved from the beginning of a queue, + including metadata about the queue's state and lock information for the requests. + """ + + model_config = ConfigDict(populate_by_name=True) + + limit: Annotated[int | None, Field(alias='limit', default=None)] + """The maximum number of requests that were requested from the queue.""" + + had_multiple_clients: Annotated[bool, Field(alias='hadMultipleClients', default=False)] + """Indicates whether the queue has been accessed by multiple clients (consumers).""" + + queue_modified_at: Annotated[datetime, Field(alias='queueModifiedAt')] + """The timestamp when the queue was last modified.""" + + lock_time: Annotated[timedelta | None, Field(alias='lockSecs', default=None)] + """The duration for which the returned requests are locked and cannot be processed by other clients.""" + + queue_has_locked_requests: Annotated[bool | None, Field(alias='queueHasLockedRequests', default=False)] + """Indicates whether the queue contains any locked requests.""" + + items: Annotated[list[Request], Field(alias='items', default_factory=list[Request])] + """The list of request objects retrieved from the beginning of the queue.""" + + +class KeyValueStoreKeyInfo(BaseModel): + """Model for a key-value store key info. + + Only internal structure. + """ + + model_config = ConfigDict(populate_by_name=True) + + key: Annotated[str, Field(alias='key')] + size: Annotated[int, Field(alias='size')] + + +class KeyValueStoreListKeysPage(BaseModel): + """Model for listing keys in the key-value store. + + Only internal structure. 
+ """ + + model_config = ConfigDict(populate_by_name=True) + + count: Annotated[int, Field(alias='count')] + limit: Annotated[int, Field(alias='limit')] + is_truncated: Annotated[bool, Field(alias='isTruncated')] + items: Annotated[list[KeyValueStoreKeyInfo], Field(alias='items', default_factory=list)] + exclusive_start_key: Annotated[str | None, Field(alias='exclusiveStartKey', default=None)] + next_exclusive_start_key: Annotated[str | None, Field(alias='nextExclusiveStartKey', default=None)] + + +class CachedRequest(BaseModel): + """Pydantic model for cached request information. + + Only internal structure. + """ + + id: str + """The ID of the request.""" + + was_already_handled: bool + """Whether the request was already handled.""" + + hydrated: Request | None = None + """The hydrated request object (the original one).""" + + lock_expires_at: datetime | None = None + """The expiration time of the lock on the request.""" diff --git a/src/apify/storage_clients/_apify/_request_queue_client.py b/src/apify/storage_clients/_apify/_request_queue_client.py new file mode 100644 index 00000000..519cd95a --- /dev/null +++ b/src/apify/storage_clients/_apify/_request_queue_client.py @@ -0,0 +1,708 @@ +from __future__ import annotations + +import asyncio +from collections import deque +from datetime import datetime, timedelta, timezone +from logging import getLogger +from typing import TYPE_CHECKING, Final + +from cachetools import LRUCache +from typing_extensions import override + +from apify_client import ApifyClientAsync +from crawlee._utils.requests import unique_key_to_request_id +from crawlee.storage_clients._base import RequestQueueClient +from crawlee.storage_clients.models import AddRequestsResponse, ProcessedRequest, RequestQueueMetadata + +from ._models import CachedRequest, ProlongRequestLockResponse, RequestQueueHead +from apify import Request + +if TYPE_CHECKING: + from collections.abc import Sequence + + from apify_client.clients import RequestQueueClientAsync + + from apify import Configuration + +logger = getLogger(__name__) + + +class ApifyRequestQueueClient(RequestQueueClient): + """An Apify platform implementation of the request queue client.""" + + _DEFAULT_LOCK_TIME: Final[timedelta] = timedelta(minutes=3) + """The default lock time for requests in the queue.""" + + _MAX_CACHED_REQUESTS: Final[int] = 1_000_000 + """Maximum number of requests that can be cached.""" + + def __init__( + self, + *, + api_client: RequestQueueClientAsync, + id: str, + name: str | None, + total_request_count: int, + handled_request_count: int, + ) -> None: + """Initialize a new instance. + + Preferably use the `ApifyRequestQueueClient.open` class method to create a new instance. + """ + self._api_client = api_client + """The Apify request queue client for API operations.""" + + self._id = id + """The ID of the request queue.""" + + self._name = name + """The name of the request queue.""" + + self._queue_head = deque[str]() + """A deque to store request IDs in the queue head.""" + + self._requests_cache: LRUCache[str, CachedRequest] = LRUCache(maxsize=self._MAX_CACHED_REQUESTS) + """A cache to store request objects. 
Request ID is used as the cache key.""" + + self._queue_has_locked_requests: bool | None = None + """Whether the queue has requests locked by another client.""" + + self._should_check_for_forefront_requests = False + """Whether to check for forefront requests in the next list_head call.""" + + self._had_multiple_clients = False + """Whether the request queue has been accessed by multiple clients.""" + + self._initial_total_count = total_request_count + """The initial total request count (from the API) when the queue was opened.""" + + self._initial_handled_count = handled_request_count + """The initial handled request count (from the API) when the queue was opened.""" + + self._assumed_total_count = 0 + """The number of requests we assume are in the queue (tracked manually for this instance).""" + + self._assumed_handled_count = 0 + """The number of requests we assume have been handled (tracked manually for this instance).""" + + self._fetch_lock = asyncio.Lock() + """Fetch lock to minimize race conditions when communicating with API.""" + + @override + async def get_metadata(self) -> RequestQueueMetadata: + total_count = self._initial_total_count + self._assumed_total_count + handled_count = self._initial_handled_count + self._assumed_handled_count + pending_count = total_count - handled_count + + return RequestQueueMetadata( + id=self._id, + name=self._name, + total_request_count=total_count, + handled_request_count=handled_count, + pending_request_count=pending_count, + created_at=datetime.now(timezone.utc), + modified_at=datetime.now(timezone.utc), + accessed_at=datetime.now(timezone.utc), + had_multiple_clients=self._had_multiple_clients, + ) + + @classmethod + async def open( + cls, + *, + id: str | None, + name: str | None, + configuration: Configuration, + ) -> ApifyRequestQueueClient: + """Open an Apify request queue client. + + This method creates and initializes a new instance of the Apify request queue client. It handles + authentication, storage lookup/creation, and metadata retrieval, and sets up internal caching and queue + management structures. + + Args: + id: The ID of an existing request queue to open. If provided, the client will connect to this specific + storage. Cannot be used together with `name`. + name: The name of a request queue to get or create. If a storage with this name exists, it will be opened; + otherwise, a new one will be created. Cannot be used together with `id`. + configuration: The configuration object containing API credentials and settings. Must include a valid + `token` and `api_base_url`. May also contain a `default_request_queue_id` for fallback when neither + `id` nor `name` is provided. + + Returns: + An instance for the opened or created storage client. + + Raises: + ValueError: If the configuration is missing required fields (token, api_base_url), if both `id` and `name` + are provided, or if neither `id` nor `name` is provided and no default storage ID is available + in the configuration. + """ + token = configuration.token + if not token: + raise ValueError(f'Apify storage client requires a valid token in Configuration (token={token}).') + + api_url = configuration.api_base_url + if not api_url: + raise ValueError(f'Apify storage client requires a valid API URL in Configuration (api_url={api_url}).') + + api_public_base_url = configuration.api_public_base_url + if not api_public_base_url: + raise ValueError( + 'Apify storage client requires a valid API public base URL in Configuration ' + f'(api_public_base_url={api_public_base_url}).' 
+            )
+
+        # Create Apify client with the provided token and API URL.
+        apify_client_async = ApifyClientAsync(
+            token=token,
+            api_url=api_url,
+            max_retries=8,
+            min_delay_between_retries_millis=500,
+            timeout_secs=360,
+        )
+        apify_rqs_client = apify_client_async.request_queues()
+
+        # If both id and name are provided, raise an error.
+        if id and name:
+            raise ValueError('Only one of "id" or "name" can be specified, not both.')
+
+        # If id is provided, get the storage by ID.
+        if id and name is None:
+            apify_rq_client = apify_client_async.request_queue(request_queue_id=id)
+
+        # If name is provided, get or create the storage by name.
+        if name and id is None:
+            id = RequestQueueMetadata.model_validate(
+                await apify_rqs_client.get_or_create(name=name),
+            ).id
+            apify_rq_client = apify_client_async.request_queue(request_queue_id=id)
+
+        # If both id and name are None, try to get the default storage ID from environment variables.
+        # The default storage ID environment variable is set by the Apify platform. It also contains
+        # a new storage ID after Actor's reboot or migration.
+        if id is None and name is None:
+            id = configuration.default_request_queue_id
+            apify_rq_client = apify_client_async.request_queue(request_queue_id=id)
+
+        # Fetch its metadata.
+        metadata = await apify_rq_client.get()
+
+        # If metadata is None, it means the storage does not exist, so we create it.
+        if metadata is None:
+            id = RequestQueueMetadata.model_validate(
+                await apify_rqs_client.get_or_create(),
+            ).id
+            apify_rq_client = apify_client_async.request_queue(request_queue_id=id)
+
+        # Verify that the storage exists by fetching its metadata again.
+        metadata = await apify_rq_client.get()
+        if metadata is None:
+            raise ValueError(f'Opening request queue with id={id} and name={name} failed.')
+
+        # Reuse the metadata fetched above instead of issuing another `get_or_create` call,
+        # which could create and return a different (unnamed) queue.
+        metadata_model = RequestQueueMetadata.model_validate(metadata)
+
+        # Ensure we have a valid ID.
+        if id is None:
+            raise ValueError('Request queue ID cannot be None.')
+
+        return cls(
+            api_client=apify_rq_client,
+            id=id,
+            name=name,
+            total_request_count=metadata_model.total_request_count,
+            handled_request_count=metadata_model.handled_request_count,
+        )
+
+    @override
+    async def purge(self) -> None:
+        raise NotImplementedError(
+            'Purging the request queue is not supported on the Apify platform. '
+            'Use the `drop` method to delete the request queue instead.'
+        )
+
+    @override
+    async def drop(self) -> None:
+        await self._api_client.delete()
+
+    @override
+    async def add_batch_of_requests(
+        self,
+        requests: Sequence[Request],
+        *,
+        forefront: bool = False,
+    ) -> AddRequestsResponse:
+        """Add a batch of requests to the queue.
+
+        Args:
+            requests: The requests to add.
+            forefront: Whether to add the requests to the beginning of the queue.
+
+        Returns:
+            Response containing information about the added requests.
+        """
+        # Prepare requests for API by converting to dictionaries.
+        requests_dict = [
+            request.model_dump(
+                by_alias=True,
+                exclude={'id'},  # Exclude ID fields from requests since the API doesn't accept them.
+            )
+            for request in requests
+        ]
+
+        # Send requests to API.
+        response = await self._api_client.batch_add_requests(requests=requests_dict, forefront=forefront)
+
+        # Update assumed total count for newly added requests.
+ api_response = AddRequestsResponse.model_validate(response) + new_request_count = 0 + for processed_request in api_response.processed_requests: + if not processed_request.was_already_present and not processed_request.was_already_handled: + new_request_count += 1 + + self._assumed_total_count += new_request_count + + return api_response + + @override + async def get_request(self, request_id: str) -> Request | None: + """Get a request by ID. + + Args: + request_id: The ID of the request to get. + + Returns: + The request or None if not found. + """ + response = await self._api_client.get_request(request_id) + + if response is None: + return None + + return Request.model_validate(response) + + @override + async def fetch_next_request(self) -> Request | None: + """Return the next request in the queue to be processed. + + Once you successfully finish processing of the request, you need to call `mark_request_as_handled` + to mark the request as handled in the queue. If there was some error in processing the request, call + `reclaim_request` instead, so that the queue will give the request to some other consumer + in another call to the `fetch_next_request` method. + + Returns: + The request or `None` if there are no more pending requests. + """ + # Ensure the queue head has requests if available. Fetching the head with lock to prevent race conditions. + async with self._fetch_lock: + await self._ensure_head_is_non_empty() + + # If queue head is empty after ensuring, there are no requests + if not self._queue_head: + return None + + # Get the next request ID from the queue head + next_request_id = self._queue_head.popleft() + + request = await self._get_or_hydrate_request(next_request_id) + + # Handle potential inconsistency where request might not be in the main table yet + if request is None: + logger.debug( + 'Cannot find a request from the beginning of queue, will be retried later', + extra={'nextRequestId': next_request_id}, + ) + return None + + # If the request was already handled, skip it + if request.handled_at is not None: + logger.debug( + 'Request fetched from the beginning of queue was already handled', + extra={'nextRequestId': next_request_id}, + ) + return None + + # Use get request to ensure we have the full request object. + request = await self.get_request(request.id) + if request is None: + logger.debug( + 'Request fetched from the beginning of queue was not found in the RQ', + extra={'nextRequestId': next_request_id}, + ) + return None + + return request + + @override + async def mark_request_as_handled(self, request: Request) -> ProcessedRequest | None: + """Mark a request as handled after successful processing. + + Handled requests will never again be returned by the `fetch_next_request` method. + + Args: + request: The request to mark as handled. + + Returns: + Information about the queue operation. `None` if the given request was not in progress. 
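+
+        Example:
+            An illustrative sketch of the expected consumer loop (the `client` name is hypothetical):
+
+                request = await client.fetch_next_request()
+                if request is not None:
+                    ...  # process the request
+                    await client.mark_request_as_handled(request)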
+ """ + # Set the handled_at timestamp if not already set + if request.handled_at is None: + request.handled_at = datetime.now(tz=timezone.utc) + + if cached_request := self._requests_cache[request.id]: + cached_request.was_already_handled = request.was_already_handled + try: + # Update the request in the API + processed_request = await self._update_request(request) + processed_request.unique_key = request.unique_key + + # Update assumed handled count if this wasn't already handled + if not processed_request.was_already_handled: + self._assumed_handled_count += 1 + + # Update the cache with the handled request + cache_key = unique_key_to_request_id(request.unique_key) + self._cache_request( + cache_key, + processed_request, + hydrated_request=request, + ) + except Exception as exc: + logger.debug(f'Error marking request {request.id} as handled: {exc!s}') + return None + else: + return processed_request + + @override + async def reclaim_request( + self, + request: Request, + *, + forefront: bool = False, + ) -> ProcessedRequest | None: + """Reclaim a failed request back to the queue. + + The request will be returned for processing later again by another call to `fetch_next_request`. + + Args: + request: The request to return to the queue. + forefront: Whether to add the request to the head or the end of the queue. + + Returns: + Information about the queue operation. `None` if the given request was not in progress. + """ + # Check if the request was marked as handled and clear it. When reclaiming, + # we want to put the request back for processing. + if request.was_already_handled: + request.handled_at = None + + # Reclaim with lock to prevent race conditions that could lead to double processing of the same request. + async with self._fetch_lock: + try: + # Update the request in the API. + processed_request = await self._update_request(request, forefront=forefront) + processed_request.unique_key = request.unique_key + + # If the request was previously handled, decrement our handled count since + # we're putting it back for processing. + if request.was_already_handled and not processed_request.was_already_handled: + self._assumed_handled_count -= 1 + + # Update the cache + cache_key = unique_key_to_request_id(request.unique_key) + self._cache_request( + cache_key, + processed_request, + hydrated_request=request, + ) + + # If we're adding to the forefront, we need to check for forefront requests + # in the next list_head call + if forefront: + self._should_check_for_forefront_requests = True + + # Try to release the lock on the request + try: + await self._delete_request_lock(request.id, forefront=forefront) + except Exception as err: + logger.debug(f'Failed to delete request lock for request {request.id}', exc_info=err) + except Exception as exc: + logger.debug(f'Error reclaiming request {request.id}: {exc!s}') + return None + else: + return processed_request + + @override + async def is_empty(self) -> bool: + """Check if the queue is empty. + + Returns: + True if the queue is empty, False otherwise. + """ + # Check _list_head and self._queue_has_locked_requests with lock to make sure they are consistent. + # Without the lock the `is_empty` is prone to falsely report True with some low probability race condition. 
+ async with self._fetch_lock: + head = await self._list_head(limit=1, lock_time=None) + return len(head.items) == 0 and not self._queue_has_locked_requests + + async def _ensure_head_is_non_empty(self) -> None: + """Ensure that the queue head has requests if they are available in the queue.""" + # If queue head has adequate requests, skip fetching more + if len(self._queue_head) > 1 and not self._should_check_for_forefront_requests: + return + + # Fetch requests from the API and populate the queue head + await self._list_head(lock_time=self._DEFAULT_LOCK_TIME) + + async def _get_or_hydrate_request(self, request_id: str) -> Request | None: + """Get a request by ID, either from cache or by fetching from API. + + Args: + request_id: The ID of the request to get. + + Returns: + The request if found and valid, otherwise None. + """ + # First check if the request is in our cache + cached_entry = self._requests_cache.get(request_id) + + if cached_entry and cached_entry.hydrated: + # If we have the request hydrated in cache, check if lock is expired + if cached_entry.lock_expires_at and cached_entry.lock_expires_at < datetime.now(tz=timezone.utc): + # Try to prolong the lock if it's expired + try: + lock_secs = int(self._DEFAULT_LOCK_TIME.total_seconds()) + response = await self._prolong_request_lock(request_id, lock_secs=lock_secs) + cached_entry.lock_expires_at = response.lock_expires_at + except Exception: + # If prolonging the lock fails, we lost the request + logger.debug(f'Failed to prolong lock for request {request_id}, returning None') + return None + + return cached_entry.hydrated + + # If not in cache or not hydrated, fetch the request + try: + # Try to acquire or prolong the lock + lock_secs = int(self._DEFAULT_LOCK_TIME.total_seconds()) + await self._prolong_request_lock(request_id, lock_secs=lock_secs) + + # Fetch the request data + request = await self.get_request(request_id) + + # If request is not found, release lock and return None + if not request: + await self._delete_request_lock(request_id) + return None + + # Update cache with hydrated request + cache_key = unique_key_to_request_id(request.unique_key) + self._cache_request( + cache_key, + ProcessedRequest( + id=request_id, + unique_key=request.unique_key, + was_already_present=True, + was_already_handled=request.handled_at is not None, + ), + hydrated_request=request, + ) + except Exception as exc: + logger.debug(f'Error fetching or locking request {request_id}: {exc!s}') + return None + else: + return request + + async def _update_request( + self, + request: Request, + *, + forefront: bool = False, + ) -> ProcessedRequest: + """Update a request in the queue. + + Args: + request: The updated request. + forefront: Whether to put the updated request in the beginning or the end of the queue. + + Returns: + The updated request + """ + response = await self._api_client.update_request( + request=request.model_dump(by_alias=True), + forefront=forefront, + ) + + return ProcessedRequest.model_validate( + {'id': request.id, 'uniqueKey': request.unique_key} | response, + ) + + async def _list_head( + self, + *, + lock_time: timedelta | None = None, + limit: int = 25, + ) -> RequestQueueHead: + """Retrieve requests from the beginning of the queue. + + Args: + lock_time: Duration for which to lock the retrieved requests. + If None, requests will not be locked. + limit: Maximum number of requests to retrieve. + + Returns: + A collection of requests from the beginning of the queue. 
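+
+        Example:
+            An illustrative sketch of an internal call that locks up to 25 requests for the
+            default lock time of 3 minutes:
+
+                head = await self._list_head(lock_time=self._DEFAULT_LOCK_TIME, limit=25)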
+ """ + # Return from cache if available and we're not checking for new forefront requests + if self._queue_head and not self._should_check_for_forefront_requests: + logger.debug(f'Using cached queue head with {len(self._queue_head)} requests') + # Create a list of requests from the cached queue head + items = [] + for request_id in list(self._queue_head)[:limit]: + cached_request = self._requests_cache.get(request_id) + if cached_request and cached_request.hydrated: + items.append(cached_request.hydrated) + + metadata = await self.get_metadata() + + return RequestQueueHead( + limit=limit, + had_multiple_clients=metadata.had_multiple_clients, + queue_modified_at=metadata.modified_at, + items=items, + queue_has_locked_requests=self._queue_has_locked_requests, + lock_time=lock_time, + ) + leftover_buffer = list[str]() + if self._should_check_for_forefront_requests: + leftover_buffer = list(self._queue_head) + self._queue_head.clear() + self._should_check_for_forefront_requests = False + + # Otherwise fetch from API + lock_time = lock_time or self._DEFAULT_LOCK_TIME + lock_secs = int(lock_time.total_seconds()) + + response = await self._api_client.list_and_lock_head( + lock_secs=lock_secs, + limit=limit, + ) + + # Update the queue head cache + self._queue_has_locked_requests = response.get('queueHasLockedRequests', False) + + for request_data in response.get('items', []): + request = Request.model_validate(request_data) + + # Skip requests without ID or unique key + if not request.id or not request.unique_key: + logger.debug( + 'Skipping request from queue head, missing ID or unique key', + extra={ + 'id': request.id, + 'unique_key': request.unique_key, + }, + ) + continue + + # Cache the request + self._cache_request( + unique_key_to_request_id(request.unique_key), + ProcessedRequest( + id=request.id, + unique_key=request.unique_key, + was_already_present=True, + was_already_handled=False, + ), + hydrated_request=request, + ) + self._queue_head.append(request.id) + + for leftover_request_id in leftover_buffer: + # After adding new requests to the forefront, any existing leftover locked request is kept in the end. + self._queue_head.append(leftover_request_id) + return RequestQueueHead.model_validate(response) + + async def _prolong_request_lock( + self, + request_id: str, + *, + lock_secs: int, + ) -> ProlongRequestLockResponse: + """Prolong the lock on a specific request in the queue. + + Args: + request_id: The identifier of the request whose lock is to be prolonged. + lock_secs: The additional amount of time, in seconds, that the request will remain locked. + + Returns: + A response containing the time at which the lock will expire. + """ + response = await self._api_client.prolong_request_lock( + request_id=request_id, + # All requests reaching this code were the tip of the queue at the moment when they were fetched, + # so if their lock expires, they should be put back to the forefront as their handling is long overdue. + forefront=True, + lock_secs=lock_secs, + ) + + result = ProlongRequestLockResponse( + lock_expires_at=datetime.fromisoformat(response['lockExpiresAt'].replace('Z', '+00:00')) + ) + + # Update the cache with the new lock expiration + for cached_request in self._requests_cache.values(): + if cached_request.id == request_id: + cached_request.lock_expires_at = result.lock_expires_at + break + + return result + + async def _delete_request_lock( + self, + request_id: str, + *, + forefront: bool = False, + ) -> None: + """Delete the lock on a specific request in the queue. 
+
+        Args:
+            request_id: ID of the request whose lock should be deleted.
+            forefront: Whether to put the request in the beginning or the end of the queue after the lock is deleted.
+        """
+        try:
+            await self._api_client.delete_request_lock(
+                request_id=request_id,
+                forefront=forefront,
+            )
+
+            # Update the cache to remove the lock
+            for cached_request in self._requests_cache.values():
+                if cached_request.id == request_id:
+                    cached_request.lock_expires_at = None
+                    break
+        except Exception as err:
+            logger.debug(f'Failed to delete request lock for request {request_id}', exc_info=err)
+
+    def _cache_request(
+        self,
+        cache_key: str,
+        processed_request: ProcessedRequest,
+        *,
+        hydrated_request: Request | None = None,
+    ) -> None:
+        """Cache a request for future use.
+
+        Args:
+            cache_key: The key to use for caching the request. It should be the request ID.
+            processed_request: The processed request information.
+            hydrated_request: The hydrated request object, if available.
+        """
+        self._requests_cache[cache_key] = CachedRequest(
+            id=processed_request.id,
+            was_already_handled=processed_request.was_already_handled,
+            hydrated=hydrated_request,
+            lock_expires_at=None,
+        )
diff --git a/src/apify/storage_clients/_apify/_storage_client.py b/src/apify/storage_clients/_apify/_storage_client.py
new file mode 100644
index 00000000..689e2c77
--- /dev/null
+++ b/src/apify/storage_clients/_apify/_storage_client.py
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from typing_extensions import override
+
+from crawlee.storage_clients._base import StorageClient
+
+from ._dataset_client import ApifyDatasetClient
+from ._key_value_store_client import ApifyKeyValueStoreClient
+from ._request_queue_client import ApifyRequestQueueClient
+from apify._utils import docs_group
+
+if TYPE_CHECKING:
+    from crawlee.configuration import Configuration
+
+
+@docs_group('Storage clients')
+class ApifyStorageClient(StorageClient):
+    """Apify storage client."""
+
+    @override
+    async def create_dataset_client(
+        self,
+        *,
+        id: str | None = None,
+        name: str | None = None,
+        configuration: Configuration | None = None,
+    ) -> ApifyDatasetClient:
+        # Import here to avoid circular imports.
+        from apify import Configuration as ApifyConfiguration  # noqa: PLC0415
+
+        configuration = configuration or ApifyConfiguration.get_global_configuration()
+        if isinstance(configuration, ApifyConfiguration):
+            return await ApifyDatasetClient.open(id=id, name=name, configuration=configuration)
+
+        raise TypeError(
+            f'Expected "configuration" to be an instance of "apify.Configuration", '
+            f'but got {type(configuration).__name__} instead.'
+        )
+
+    @override
+    async def create_kvs_client(
+        self,
+        *,
+        id: str | None = None,
+        name: str | None = None,
+        configuration: Configuration | None = None,
+    ) -> ApifyKeyValueStoreClient:
+        # Import here to avoid circular imports.
+        from apify import Configuration as ApifyConfiguration  # noqa: PLC0415
+
+        configuration = configuration or ApifyConfiguration.get_global_configuration()
+        if isinstance(configuration, ApifyConfiguration):
+            return await ApifyKeyValueStoreClient.open(id=id, name=name, configuration=configuration)
+
+        raise TypeError(
+            f'Expected "configuration" to be an instance of "apify.Configuration", '
+            f'but got {type(configuration).__name__} instead.'
+ ) + + @override + async def create_rq_client( + self, + *, + id: str | None = None, + name: str | None = None, + configuration: Configuration | None = None, + ) -> ApifyRequestQueueClient: + # Import here to avoid circular imports. + from apify import Configuration as ApifyConfiguration # noqa: PLC0415 + + configuration = configuration or ApifyConfiguration.get_global_configuration() + if isinstance(configuration, ApifyConfiguration): + return await ApifyRequestQueueClient.open(id=id, name=name, configuration=configuration) + + raise TypeError( + f'Expected "configuration" to be an instance of "apify.Configuration", ' + f'but got {type(configuration).__name__} instead.' + ) diff --git a/src/apify/storage_clients/_apify/py.typed b/src/apify/storage_clients/_apify/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/src/apify/storage_clients/_file_system/__init__.py b/src/apify/storage_clients/_file_system/__init__.py new file mode 100644 index 00000000..b18af53b --- /dev/null +++ b/src/apify/storage_clients/_file_system/__init__.py @@ -0,0 +1,2 @@ +from ._key_value_store_client import ApifyFileSystemKeyValueStoreClient +from ._storage_client import ApifyFileSystemStorageClient diff --git a/src/apify/storage_clients/_file_system/_key_value_store_client.py b/src/apify/storage_clients/_file_system/_key_value_store_client.py new file mode 100644 index 00000000..d0b882c8 --- /dev/null +++ b/src/apify/storage_clients/_file_system/_key_value_store_client.py @@ -0,0 +1,36 @@ +import asyncio + +from typing_extensions import override + +from crawlee._consts import METADATA_FILENAME +from crawlee.storage_clients._file_system import FileSystemKeyValueStoreClient + +from apify._configuration import Configuration + + +class ApifyFileSystemKeyValueStoreClient(FileSystemKeyValueStoreClient): + """Apify-specific implementation of the `FileSystemKeyValueStoreClient`. + + The only difference is that it overrides the `purge` method to delete all files in the key-value store + directory, except for the metadata file and the `INPUT.json` file. + """ + + @override + async def purge(self) -> None: + """Purges the key-value store by deleting all its contents. + + It deletes all files in the key-value store directory, except for the metadata file and + the `INPUT.json` file. It also updates the metadata to reflect that the store has been purged. 
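+
+        Example:
+            An illustrative sketch of the effect on a store directory (file names other than the
+            input file are hypothetical; the metadata file name comes from crawlee's
+            `METADATA_FILENAME` constant):
+
+                <METADATA_FILENAME>   # kept
+                INPUT.json            # kept
+                OUTPUT.json           # deleted
+                screenshot.png        # deleted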
+ """ + kvs_input_key = Configuration.get_global_configuration().input_key + async with self._lock: + for file_path in self.path_to_kvs.glob('*'): + if file_path.name in {METADATA_FILENAME, f'{kvs_input_key}.json'}: + continue + if file_path.is_file(): + await asyncio.to_thread(file_path.unlink, missing_ok=True) + + await self._update_metadata( + update_accessed_at=True, + update_modified_at=True, + ) diff --git a/src/apify/storage_clients/_file_system/_storage_client.py b/src/apify/storage_clients/_file_system/_storage_client.py new file mode 100644 index 00000000..403943e3 --- /dev/null +++ b/src/apify/storage_clients/_file_system/_storage_client.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from typing_extensions import override + +from crawlee.configuration import Configuration +from crawlee.storage_clients import FileSystemStorageClient + +from ._key_value_store_client import ApifyFileSystemKeyValueStoreClient + +if TYPE_CHECKING: + from crawlee.storage_clients._file_system import FileSystemKeyValueStoreClient + + +class ApifyFileSystemStorageClient(FileSystemStorageClient): + """Apify-specific implementation of the file system storage client. + + The only difference is that it uses `ApifyFileSystemKeyValueStoreClient` for key-value stores, + which overrides the `purge` method to delete all files in the key-value store directory + except for the metadata file and the `INPUT.json` file. + """ + + @override + async def create_kvs_client( + self, + *, + id: str | None = None, + name: str | None = None, + configuration: Configuration | None = None, + ) -> FileSystemKeyValueStoreClient: + configuration = configuration or Configuration.get_global_configuration() + client = await ApifyFileSystemKeyValueStoreClient.open(id=id, name=name, configuration=configuration) + await self._purge_if_needed(client, configuration) + return client diff --git a/src/apify/storage_clients/py.typed b/src/apify/storage_clients/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/src/apify/storages/__init__.py b/src/apify/storages/__init__.py index 3cd0dfe8..2ed85e84 100644 --- a/src/apify/storages/__init__.py +++ b/src/apify/storages/__init__.py @@ -1,5 +1,3 @@ from crawlee.storages import Dataset, KeyValueStore, RequestQueue -from ._request_list import RequestList - -__all__ = ['Dataset', 'KeyValueStore', 'RequestList', 'RequestQueue'] +__all__ = ['Dataset', 'KeyValueStore', 'RequestQueue'] diff --git a/tests/integration/actor_source_base/Dockerfile b/tests/integration/actor_source_base/Dockerfile index 026b4fb3..1e5df612 100644 --- a/tests/integration/actor_source_base/Dockerfile +++ b/tests/integration/actor_source_base/Dockerfile @@ -3,6 +3,10 @@ FROM apify/actor-python:BASE_IMAGE_VERSION_PLACEHOLDER COPY . 
./ +RUN apt-get update && apt-get install -y \ + git \ + && rm -rf /var/lib/apt/lists/* + RUN echo "Python version:" \ && python --version \ && echo "Pip version:" \ diff --git a/tests/integration/actor_source_base/requirements.txt b/tests/integration/actor_source_base/requirements.txt index fe77c2dc..66a782ba 100644 --- a/tests/integration/actor_source_base/requirements.txt +++ b/tests/integration/actor_source_base/requirements.txt @@ -1,4 +1,4 @@ # The test fixture will put the Apify SDK wheel path on the next line APIFY_SDK_WHEEL_PLACEHOLDER uvicorn[standard] -crawlee[parsel] +crawlee[parsel] @ git+https://github.com/apify/crawlee-python.git@master diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 4cfb76ec..07b6c758 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -15,7 +15,6 @@ from apify_client import ApifyClient, ApifyClientAsync from apify_shared.consts import ActorJobStatus, ActorSourceType, ApifyEnvVars from crawlee import service_locator -from crawlee.storages import _creation_management import apify._actor from ._utils import generate_unique_resource_name @@ -53,24 +52,16 @@ def _prepare_test_env() -> None: # Set the environment variable for the local storage directory to the temporary path. monkeypatch.setenv(ApifyEnvVars.LOCAL_STORAGE_DIR, str(tmp_path)) - # Reset the flags in the service locator to indicate that no services are explicitly set. This ensures - # a clean state, as services might have been set during a previous test and not reset properly. - service_locator._configuration_was_retrieved = False - service_locator._storage_client_was_retrieved = False - service_locator._event_manager_was_retrieved = False - # Reset the services in the service locator. service_locator._configuration = None service_locator._event_manager = None service_locator._storage_client = None + service_locator._storage_instance_manager = None - # Clear creation-related caches to ensure no state is carried over between tests. - monkeypatch.setattr(_creation_management, '_cache_dataset_by_id', {}) - monkeypatch.setattr(_creation_management, '_cache_dataset_by_name', {}) - monkeypatch.setattr(_creation_management, '_cache_kvs_by_id', {}) - monkeypatch.setattr(_creation_management, '_cache_kvs_by_name', {}) - monkeypatch.setattr(_creation_management, '_cache_rq_by_id', {}) - monkeypatch.setattr(_creation_management, '_cache_rq_by_name', {}) + # Reset the retrieval flags. + service_locator._configuration_was_retrieved = False + service_locator._event_manager_was_retrieved = False + service_locator._storage_client_was_retrieved = False # Verify that the test environment was set up correctly. 
assert os.environ.get(ApifyEnvVars.LOCAL_STORAGE_DIR) == str(tmp_path) diff --git a/tests/integration/test_actor_api_helpers.py b/tests/integration/test_actor_api_helpers.py index c4520a85..93ce502f 100644 --- a/tests/integration/test_actor_api_helpers.py +++ b/tests/integration/test_actor_api_helpers.py @@ -46,9 +46,6 @@ async def main() -> None: assert len(env_dict.get('actor_id', '')) == 17 assert len(env_dict.get('actor_run_id', '')) == 17 assert len(env_dict.get('user_id', '')) == 17 - assert len(env_dict.get('default_dataset_id', '')) == 17 - assert len(env_dict.get('default_key_value_store_id', '')) == 17 - assert len(env_dict.get('default_request_queue_id', '')) == 17 actor = await make_actor(label='get-env', main_func=main) run_result = await run_actor(actor) diff --git a/tests/integration/test_actor_dataset.py b/tests/integration/test_actor_dataset.py index 20a71750..1cce4fd9 100644 --- a/tests/integration/test_actor_dataset.py +++ b/tests/integration/test_actor_dataset.py @@ -104,8 +104,9 @@ async def main() -> None: dataset_by_name_2 = await Actor.open_dataset(name=dataset_name) assert dataset_by_name_1 is dataset_by_name_2 - dataset_by_id_1 = await Actor.open_dataset(id=dataset_by_name_1._id) - dataset_by_id_2 = await Actor.open_dataset(id=dataset_by_name_1._id) + dataset_1_metadata = await dataset_by_name_1.get_metadata() + dataset_by_id_1 = await Actor.open_dataset(id=dataset_1_metadata.id) + dataset_by_id_2 = await Actor.open_dataset(id=dataset_1_metadata.id) assert dataset_by_id_1 is dataset_by_name_1 assert dataset_by_id_2 is dataset_by_id_1 @@ -129,7 +130,7 @@ async def test_force_cloud( async with Actor: dataset = await Actor.open_dataset(name=dataset_name, force_cloud=True) - dataset_id = dataset._id + dataset_id = (await dataset.get_metadata()).id await dataset.push_data(dataset_item) diff --git a/tests/integration/test_actor_key_value_store.py b/tests/integration/test_actor_key_value_store.py index 6b6dd767..799cbea3 100644 --- a/tests/integration/test_actor_key_value_store.py +++ b/tests/integration/test_actor_key_value_store.py @@ -45,8 +45,9 @@ async def main() -> None: kvs_by_name_2 = await Actor.open_key_value_store(name=kvs_name) assert kvs_by_name_1 is kvs_by_name_2 - kvs_by_id_1 = await Actor.open_key_value_store(id=kvs_by_name_1._id) - kvs_by_id_2 = await Actor.open_key_value_store(id=kvs_by_name_1._id) + kvs_1_metadata = await kvs_by_name_1.get_metadata() + kvs_by_id_1 = await Actor.open_key_value_store(id=kvs_1_metadata.id) + kvs_by_id_2 = await Actor.open_key_value_store(id=kvs_1_metadata.id) assert kvs_by_id_1 is kvs_by_name_1 assert kvs_by_id_2 is kvs_by_id_1 @@ -69,7 +70,7 @@ async def test_force_cloud( async with Actor: key_value_store = await Actor.open_key_value_store(name=key_value_store_name, force_cloud=True) - key_value_store_id = key_value_store._id + key_value_store_id = (await key_value_store.get_metadata()).id await key_value_store.set_value('foo', 'bar') @@ -202,28 +203,29 @@ async def test_generate_public_url_for_kvs_record( ) -> None: async def main() -> None: from apify._crypto import create_hmac_signature + from apify.storage_clients._apify._models import ApifyKeyValueStoreMetadata async with Actor: public_api_url = Actor.config.api_public_base_url - default_store_id = Actor.config.default_key_value_store_id + default_kvs_id = Actor.config.default_key_value_store_id record_key = 'public-record-key' - store = await Actor.open_key_value_store() + kvs = await Actor.open_key_value_store() + metadata = await kvs.get_metadata() - assert 
isinstance(store.storage_object.model_extra, dict) - url_signing_secret_key = store.storage_object.model_extra.get('urlSigningSecretKey') - assert url_signing_secret_key is not None + assert isinstance(metadata, ApifyKeyValueStoreMetadata) + assert metadata.url_signing_secret_key is not None - await store.set_value(record_key, {'exposedData': 'test'}, 'application/json') + await kvs.set_value(record_key, {'exposedData': 'test'}, 'application/json') - record_url = await store.get_public_url(record_key) - - signature = create_hmac_signature(url_signing_secret_key, record_key) - assert ( - record_url - == f'{public_api_url}/v2/key-value-stores/{default_store_id}/records/{record_key}?signature={signature}' + record_url = await kvs.get_public_url(record_key) + signature = create_hmac_signature(metadata.url_signing_secret_key, record_key) + expected_record_url = ( + f'{public_api_url}/v2/key-value-stores/{default_kvs_id}/records/{record_key}?signature={signature}' ) + assert record_url == expected_record_url + actor = await make_actor(label='kvs-get-public-url', main_func=main) run_result = await run_actor(actor) diff --git a/tests/integration/test_actor_request_queue.py b/tests/integration/test_actor_request_queue.py index 06e8529e..64a846b5 100644 --- a/tests/integration/test_actor_request_queue.py +++ b/tests/integration/test_actor_request_queue.py @@ -3,10 +3,9 @@ from typing import TYPE_CHECKING from apify_shared.consts import ApifyEnvVars -from crawlee import Request from ._utils import generate_unique_resource_name -from apify import Actor +from apify import Actor, Request if TYPE_CHECKING: import pytest @@ -46,8 +45,9 @@ async def main() -> None: rq_by_name_2 = await Actor.open_request_queue(name=rq_name) assert rq_by_name_1 is rq_by_name_2 - rq_by_id_1 = await Actor.open_request_queue(id=rq_by_name_1._id) - rq_by_id_2 = await Actor.open_request_queue(id=rq_by_name_1._id) + rq_1_metadata = await rq_by_name_1.get_metadata() + rq_by_id_1 = await Actor.open_request_queue(id=rq_1_metadata.id) + rq_by_id_2 = await Actor.open_request_queue(id=rq_1_metadata.id) assert rq_by_id_1 is rq_by_name_1 assert rq_by_id_2 is rq_by_id_1 @@ -70,7 +70,7 @@ async def test_force_cloud( async with Actor: request_queue = await Actor.open_request_queue(name=request_queue_name, force_cloud=True) - request_queue_id = request_queue._id + request_queue_id = (await request_queue.get_metadata()).id request_info = await request_queue.add_request(Request.from_url('http://example.com')) @@ -86,3 +86,30 @@ async def test_force_cloud( assert request_queue_request['url'] == 'http://example.com' finally: await request_queue_client.delete() + + +async def test_request_queue_is_finished( + apify_client_async: ApifyClientAsync, + monkeypatch: pytest.MonkeyPatch, +) -> None: + assert apify_client_async.token is not None + monkeypatch.setenv(ApifyEnvVars.TOKEN, apify_client_async.token) + + request_queue_name = generate_unique_resource_name('request_queue') + + async with Actor: + try: + request_queue = await Actor.open_request_queue(name=request_queue_name, force_cloud=True) + await request_queue.add_request(Request.from_url('http://example.com')) + assert not await request_queue.is_finished() + + request = await request_queue.fetch_next_request() + assert request is not None + assert not await request_queue.is_finished(), ( + 'RequestQueue should not be finished unless the request is marked as handled.' 
+ ) + + await request_queue.mark_request_as_handled(request) + assert await request_queue.is_finished() + finally: + await request_queue.drop() diff --git a/tests/integration/test_crawlers_with_storages.py b/tests/integration/test_crawlers_with_storages.py index 3dd32707..a2ba1e4d 100644 --- a/tests/integration/test_crawlers_with_storages.py +++ b/tests/integration/test_crawlers_with_storages.py @@ -2,8 +2,6 @@ from typing import TYPE_CHECKING -import pytest - if TYPE_CHECKING: from .conftest import MakeActorFunction, RunActorFunction @@ -78,7 +76,6 @@ async def default_handler(context: ParselCrawlingContext) -> None: assert run_result.status == 'SUCCEEDED' -@pytest.mark.skip(reason='Sometimes crawler does not respect max_request_retries argument, see issue #540') async def test_actor_on_platform_max_request_retries( make_actor: MakeActorFunction, run_actor: RunActorFunction, @@ -87,6 +84,7 @@ async def test_actor_on_platform_max_request_retries( async def main() -> None: """The crawler entry point.""" + from crawlee.crawlers import BasicCrawlingContext, ParselCrawler, ParselCrawlingContext from apify import Actor diff --git a/tests/integration/test_request_queue.py b/tests/integration/test_request_queue.py index 9840c358..fe9c50e5 100644 --- a/tests/integration/test_request_queue.py +++ b/tests/integration/test_request_queue.py @@ -64,7 +64,7 @@ async def main() -> None: Actor.log.info('Request queue opened') # Add some requests - await rq.add_requests_batched([f'https://example.com/{i}' for i in range(desired_request_count)]) + await rq.add_requests([f'https://example.com/{i}' for i in range(desired_request_count)]) total_count = await rq.get_total_count() Actor.log.info(f'Added {desired_request_count} requests in batch, total in queue: {total_count}') @@ -111,7 +111,7 @@ async def main() -> None: Request.from_url(f'https://example.com/{i}', unique_key=str(i - 1 if i % 4 == 1 else i)) for i in range(desired_request_count) ] - await rq.add_requests_batched(requests_to_add) + await rq.add_requests(requests_to_add) total_count = await rq.get_total_count() Actor.log.info( f'Added {desired_request_count} requests with duplicate unique keys, total in queue: {total_count}' @@ -454,7 +454,7 @@ async def main() -> None: assert initial_handled == 0, f'initial_handled={initial_handled}' # Add requests - await rq.add_requests_batched([f'https://example.com/{i}' for i in range(5)]) + await rq.add_requests([f'https://example.com/{i}' for i in range(5)]) Actor.log.info('Added 5 requests in batch') # Check counts after adding @@ -500,7 +500,7 @@ async def main() -> None: Actor.log.info(f'Prepared {len(batch_requests)} requests for batch add') # Add in batch - await rq.add_requests_batched(batch_requests) + await rq.add_requests(batch_requests) Actor.log.info('Batch add completed') # Verify all requests were added @@ -617,7 +617,7 @@ async def main() -> None: assert request is None, f'request={request}' # Check metadata for empty queue - metadata = await rq.get_info() + metadata = await rq.get_metadata() assert metadata is not None, f'metadata={metadata}' Actor.log.info( f'Empty queue metadata - Total: {metadata.total_request_count}, ' @@ -653,7 +653,7 @@ async def main() -> None: Actor.log.info(f'Created batch of {len(large_batch)} requests') # Add in batch - await rq.add_requests_batched(large_batch, batch_size=100, wait_for_all_requests_to_be_added=True) + await rq.add_requests(large_batch, batch_size=100, wait_for_all_requests_to_be_added=True) Actor.log.info('Large batch add completed') # Verify all 
requests were added @@ -712,7 +712,7 @@ async def main() -> None: Request.from_url('https://example.com/mixed2', method='POST'), 'https://example.com/mixed3', ] - await rq.add_requests_batched(mixed_batch) + await rq.add_requests(mixed_batch) Actor.log.info('Added mixed batch of strings and Request objects') total_count = await rq.get_total_count() @@ -851,7 +851,7 @@ async def main() -> None: # Add initial batch initial_requests = [f'https://example.com/persist/{i}' for i in range(10)] - await rq.add_requests_batched(initial_requests, wait_for_all_requests_to_be_added=True) + await rq.add_requests(initial_requests, wait_for_all_requests_to_be_added=True) Actor.log.info(f'Added initial batch of {len(initial_requests)} requests') initial_total = await rq.get_total_count() @@ -871,7 +871,7 @@ async def main() -> None: # Add more requests additional_requests = [f'https://example.com/additional/{i}' for i in range(5)] - await rq.add_requests_batched(additional_requests, wait_for_all_requests_to_be_added=True) + await rq.add_requests(additional_requests, wait_for_all_requests_to_be_added=True) Actor.log.info(f'Added additional batch of {len(additional_requests)} requests') # Check final state diff --git a/tests/unit/actor/test_actor_dataset.py b/tests/unit/actor/test_actor_dataset.py index ef6282bb..4e1b99d9 100644 --- a/tests/unit/actor/test_actor_dataset.py +++ b/tests/unit/actor/test_actor_dataset.py @@ -1,19 +1,9 @@ from __future__ import annotations -from typing import TYPE_CHECKING - import pytest -from apify_shared.consts import ActorEnvVars - from apify import Actor -if TYPE_CHECKING: - from crawlee.storage_clients import MemoryStorageClient - -# NOTE: We only test the dataset methods available on Actor class/instance. -# Actual tests for the implementations are in storages/. 
- async def test_throws_error_without_actor_init() -> None: with pytest.raises(RuntimeError): @@ -31,34 +21,19 @@ async def test_open_dataset_returns_same_references() -> None: dataset_by_name_2 = await Actor.open_dataset(name=dataset_name) assert dataset_by_name_1 is dataset_by_name_2 - dataset_by_id_1 = await Actor.open_dataset(id=dataset_by_name_1._id) - dataset_by_id_2 = await Actor.open_dataset(id=dataset_by_name_1._id) + dataset_1_metadata = await dataset_by_name_1.get_metadata() + dataset_by_id_1 = await Actor.open_dataset(id=dataset_1_metadata.id) + dataset_by_id_2 = await Actor.open_dataset(id=dataset_1_metadata.id) assert dataset_by_id_1 is dataset_by_name_1 assert dataset_by_id_2 is dataset_by_id_1 -async def test_open_dataset_uses_env_var( - monkeypatch: pytest.MonkeyPatch, - memory_storage_client: MemoryStorageClient, -) -> None: - default_dataset_id = 'my-new-default-id' - monkeypatch.setenv(ActorEnvVars.DEFAULT_DATASET_ID, default_dataset_id) - - async with Actor: - ddt = await Actor.open_dataset() - assert ddt._id == default_dataset_id - await memory_storage_client.dataset(ddt._id).delete() - - async def test_push_data_to_dataset() -> None: - async with Actor as my_actor: - dataset = await my_actor.open_dataset() + async with Actor as actor: + dataset = await actor.open_dataset() desired_item_count = 100 await dataset.push_data([{'id': i} for i in range(desired_item_count)]) - dataset_info = await dataset.get_info() - assert dataset_info is not None - list_page = await dataset.get_data(limit=desired_item_count) assert {item['id'] for item in list_page.items} == set(range(desired_item_count)) diff --git a/tests/unit/actor/test_actor_env_helpers.py b/tests/unit/actor/test_actor_env_helpers.py index e9eacdb2..27fc1c39 100644 --- a/tests/unit/actor/test_actor_env_helpers.py +++ b/tests/unit/actor/test_actor_env_helpers.py @@ -44,6 +44,7 @@ async def test_get_env_with_randomized_env_vars(monkeypatch: pytest.MonkeyPatch) ApifyEnvVars.LOG_FORMAT, ApifyEnvVars.LOG_LEVEL, ActorEnvVars.STANDBY_PORT, + ApifyEnvVars.PERSIST_STORAGE, } legacy_env_vars = { @@ -59,7 +60,7 @@ async def test_get_env_with_randomized_env_vars(monkeypatch: pytest.MonkeyPatch) } # Set up random env vars - expected_get_env: dict[str, Any] = {} + expected_get_env = dict[str, Any]() expected_get_env[ApifyEnvVars.LOG_LEVEL.name.lower()] = 'INFO' for int_env_var in INTEGER_ENV_VARS: diff --git a/tests/unit/actor/test_actor_key_value_store.py b/tests/unit/actor/test_actor_key_value_store.py index a175da3e..66d4a6e7 100644 --- a/tests/unit/actor/test_actor_key_value_store.py +++ b/tests/unit/actor/test_actor_key_value_store.py @@ -1,23 +1,16 @@ from __future__ import annotations -from typing import TYPE_CHECKING - import pytest from apify_shared.consts import ApifyEnvVars -from apify_shared.utils import json_dumps +from crawlee._utils.file import json_dumps from ..test_crypto import PRIVATE_KEY_PASSWORD, PRIVATE_KEY_PEM_BASE64, PUBLIC_KEY from apify import Actor from apify._consts import ENCRYPTED_JSON_VALUE_PREFIX, ENCRYPTED_STRING_VALUE_PREFIX from apify._crypto import public_encrypt -if TYPE_CHECKING: - from crawlee.storage_clients import MemoryStorageClient - -# NOTE: We only test the key-value store methods available on Actor class/instance. -# Actual tests for the implementations are in storages/. 
async def test_open_returns_same_references() -> None: async with Actor: kvs1 = await Actor.open_key_value_store() @@ -29,8 +22,9 @@ async def test_open_returns_same_references() -> None: kvs_by_name_2 = await Actor.open_key_value_store(name=kvs_name) assert kvs_by_name_1 is kvs_by_name_2 - kvs_by_id_1 = await Actor.open_key_value_store(id=kvs_by_name_1._id) - kvs_by_id_2 = await Actor.open_key_value_store(id=kvs_by_name_1._id) + kvs_1_metadata = await kvs_by_name_1.get_metadata() + kvs_by_id_1 = await Actor.open_key_value_store(id=kvs_1_metadata.id) + kvs_by_id_2 = await Actor.open_key_value_store(id=kvs_1_metadata.id) assert kvs_by_id_1 is kvs_by_name_1 assert kvs_by_id_2 is kvs_by_id_1 @@ -44,32 +38,24 @@ async def test_set_and_get_value() -> None: test_key = 'test_key' test_value = 'test_value' test_content_type = 'text/plain' - async with Actor as my_actor: - await my_actor.set_value(key=test_key, value=test_value, content_type=test_content_type) - value = await my_actor.get_value(key=test_key) + + async with Actor as actor: + await actor.set_value(key=test_key, value=test_value, content_type=test_content_type) + value = await actor.get_value(key=test_key) assert value == test_value -async def test_get_input(memory_storage_client: MemoryStorageClient) -> None: +async def test_get_input() -> None: input_key = 'INPUT' test_input = {'foo': 'bar'} - await memory_storage_client.key_value_stores().get_or_create(id='default') - await memory_storage_client.key_value_store('default').set_record( - key=input_key, - value=json_dumps(test_input), - content_type='application/json', - ) - - async with Actor as my_actor: - input = await my_actor.get_input() # noqa: A001 - assert input['foo'] == test_input['foo'] + async with Actor as actor: + await actor.set_value(key=input_key, value=test_input) + actor_input = await actor.get_input() + assert actor_input['foo'] == test_input['foo'] -async def test_get_input_with_encrypted_secrets( - monkeypatch: pytest.MonkeyPatch, - memory_storage_client: MemoryStorageClient, -) -> None: +async def test_get_input_with_encrypted_secrets(monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv(ApifyEnvVars.INPUT_SECRETS_PRIVATE_KEY_FILE, PRIVATE_KEY_PEM_BASE64) monkeypatch.setenv(ApifyEnvVars.INPUT_SECRETS_PRIVATE_KEY_PASSPHRASE, PRIVATE_KEY_PASSWORD) @@ -84,9 +70,9 @@ async def test_get_input_with_encrypted_secrets( # and includes schemahash. We are testing both formats to ensure backward compatibility. 
encrypted_string_legacy = public_encrypt(secret_string_legacy, public_key=PUBLIC_KEY) - encrypted_string = public_encrypt(json_dumps(secret_string), public_key=PUBLIC_KEY) - encrypted_object = public_encrypt(json_dumps(secret_object), public_key=PUBLIC_KEY) - encrypted_array = public_encrypt(json_dumps(secret_array), public_key=PUBLIC_KEY) + encrypted_string = public_encrypt(await json_dumps(secret_string), public_key=PUBLIC_KEY) + encrypted_object = public_encrypt(await json_dumps(secret_object), public_key=PUBLIC_KEY) + encrypted_array = public_encrypt(await json_dumps(secret_array), public_key=PUBLIC_KEY) input_with_secret = { 'foo': 'bar', @@ -112,17 +98,11 @@ async def test_get_input_with_encrypted_secrets( ), } - await memory_storage_client.key_value_stores().get_or_create(id='default') - await memory_storage_client.key_value_store('default').set_record( - key=input_key, - value=json_dumps(input_with_secret), - content_type='application/json', - ) - - async with Actor as my_actor: - input = await my_actor.get_input() # noqa: A001 - assert input['foo'] == input_with_secret['foo'] - assert input['secret_string_legacy'] == secret_string_legacy - assert input['secret_string'] == secret_string - assert input['secret_object'] == secret_object - assert input['secret_array'] == secret_array + async with Actor as actor: + await actor.set_value(key=input_key, value=input_with_secret, content_type='application/json') + actor_input = await actor.get_input() + assert actor_input['foo'] == input_with_secret['foo'] + assert actor_input['secret_string_legacy'] == secret_string_legacy + assert actor_input['secret_string'] == secret_string + assert actor_input['secret_object'] == secret_object + assert actor_input['secret_array'] == secret_array diff --git a/tests/unit/actor/test_actor_request_queue.py b/tests/unit/actor/test_actor_request_queue.py index 5504715f..d7c52771 100644 --- a/tests/unit/actor/test_actor_request_queue.py +++ b/tests/unit/actor/test_actor_request_queue.py @@ -4,8 +4,6 @@ from apify import Actor -# NOTE: We only test the references here. 
Actual tests for the implementations are in storages/
-
 
 
 async def test_open_throws_without_init() -> None:
     with pytest.raises(RuntimeError):
@@ -23,7 +21,8 @@ async def test_open_returns_same_references() -> None:
         rq_by_name_2 = await Actor.open_request_queue(name=rq_name)
         assert rq_by_name_1 is rq_by_name_2
 
-        rq_by_id_1 = await Actor.open_request_queue(id=rq_by_name_1._id)
-        rq_by_id_2 = await Actor.open_request_queue(id=rq_by_name_1._id)
+        rq_1_metadata = await rq_by_name_1.get_metadata()
+        rq_by_id_1 = await Actor.open_request_queue(id=rq_1_metadata.id)
+        rq_by_id_2 = await Actor.open_request_queue(id=rq_1_metadata.id)
         assert rq_by_id_1 is rq_by_name_1
         assert rq_by_id_2 is rq_by_id_1
diff --git a/tests/unit/actor/test_request_list.py b/tests/unit/actor/test_request_list.py
index bcc60578..3ed751c0 100644
--- a/tests/unit/actor/test_request_list.py
+++ b/tests/unit/actor/test_request_list.py
@@ -11,7 +11,8 @@
 from crawlee._request import UserData
 from crawlee._types import HttpMethod
 
-from apify.storages._request_list import URL_NO_COMMAS_REGEX, RequestList
+from apify.request_loaders import ApifyRequestList
+from apify.request_loaders._apify_request_list import URL_NO_COMMAS_REGEX
 
 if TYPE_CHECKING:
     from pytest_httpserver import HTTPServer
@@ -53,7 +54,7 @@ async def test_request_list_open_request_types(
     }
     request_dict_input = {**minimal_request_dict_input, **optional_input}
 
-    request_list = await RequestList.open(request_list_sources_input=[request_dict_input])
+    request_list = await ApifyRequestList.open(request_list_sources_input=[request_dict_input])
 
     assert not await request_list.is_empty()
     request = await request_list.fetch_next_request()
@@ -102,7 +103,7 @@ def request_handler(request: Request, response: Response) -> Response:
         httpserver.expect_oneshot_request(path).with_post_hook(request_handler).respond_with_data(status=200)
         routes[entry['requestsFromUrl']] = Mock()
 
-    await RequestList.open(request_list_sources_input=request_list_sources_input)
+    await ApifyRequestList.open(request_list_sources_input=request_list_sources_input)
 
     assert len(routes) == len(request_list_sources_input)
 
@@ -150,7 +151,7 @@ class MockedUrlInfo:
         path = str(URL(mocked_url.url).path)
         httpserver.expect_oneshot_request(path).respond_with_data(status=200, response_data=mocked_url.response_text)
 
-    request_list = await RequestList.open(request_list_sources_input=request_list_sources_input)
+    request_list = await ApifyRequestList.open(request_list_sources_input=request_list_sources_input)
     generated_requests = []
     while request := await request_list.fetch_next_request():
         generated_requests.append(request)
@@ -171,7 +172,7 @@ async def test_request_list_open_from_url_additional_inputs(httpserver: HTTPServ
     }
     httpserver.expect_oneshot_request('/file.txt').respond_with_data(status=200, response_data=expected_url)
 
-    request_list = await RequestList.open(request_list_sources_input=[example_start_url_input])
+    request_list = await ApifyRequestList.open(request_list_sources_input=[example_start_url_input])
     request = await request_list.fetch_next_request()
     # Check all properties correctly created for request
     assert request
@@ -187,7 +188,7 @@
 
 
 async def test_request_list_open_name() -> None:
     name = 'some_name'
-    request_list = await RequestList.open(name=name)
+    request_list = await ApifyRequestList.open(name=name)
 
     assert request_list.name == name
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
index 28a1e460..d44aa986
100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -14,9 +14,6 @@ from apify_client import ApifyClientAsync from apify_shared.consts import ApifyEnvVars from crawlee import service_locator -from crawlee.configuration import Configuration as CrawleeConfiguration -from crawlee.storage_clients import MemoryStorageClient -from crawlee.storages import _creation_management import apify._actor @@ -48,24 +45,16 @@ def _prepare_test_env() -> None: # Set the environment variable for the local storage directory to the temporary path. monkeypatch.setenv(ApifyEnvVars.LOCAL_STORAGE_DIR, str(tmp_path)) - # Reset the flags in the service locator to indicate that no services are explicitly set. This ensures - # a clean state, as services might have been set during a previous test and not reset properly. - service_locator._configuration_was_retrieved = False - service_locator._storage_client_was_retrieved = False - service_locator._event_manager_was_retrieved = False - # Reset the services in the service locator. service_locator._configuration = None service_locator._event_manager = None service_locator._storage_client = None + service_locator._storage_instance_manager = None - # Clear creation-related caches to ensure no state is carried over between tests. - monkeypatch.setattr(_creation_management, '_cache_dataset_by_id', {}) - monkeypatch.setattr(_creation_management, '_cache_dataset_by_name', {}) - monkeypatch.setattr(_creation_management, '_cache_kvs_by_id', {}) - monkeypatch.setattr(_creation_management, '_cache_kvs_by_name', {}) - monkeypatch.setattr(_creation_management, '_cache_rq_by_id', {}) - monkeypatch.setattr(_creation_management, '_cache_rq_by_name', {}) + # Reset the retrieval flags. + service_locator._configuration_was_retrieved = False + service_locator._event_manager_was_retrieved = False + service_locator._storage_client_was_retrieved = False # Verify that the test environment was set up correctly. 
assert os.environ.get(ApifyEnvVars.LOCAL_STORAGE_DIR) == str(tmp_path) @@ -183,15 +172,6 @@ def apify_client_async_patcher(monkeypatch: pytest.MonkeyPatch) -> ApifyClientAs return ApifyClientAsyncPatcher(monkeypatch) -@pytest.fixture -def memory_storage_client() -> MemoryStorageClient: - configuration = CrawleeConfiguration() - configuration.persist_storage = True - configuration.write_metadata = True - - return MemoryStorageClient.from_config(configuration) - - @pytest.fixture(scope='session') def make_httpserver() -> Iterator[HTTPServer]: werkzeug_logger = getLogger('werkzeug') diff --git a/tests/unit/events/__init__.py b/tests/unit/events/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/test_platform_event_manager.py b/tests/unit/events/test_apify_event_manager.py similarity index 93% rename from tests/unit/test_platform_event_manager.py rename to tests/unit/events/test_apify_event_manager.py index 7389d4da..410a577a 100644 --- a/tests/unit/test_platform_event_manager.py +++ b/tests/unit/events/test_apify_event_manager.py @@ -15,7 +15,8 @@ from crawlee.events._types import Event from apify import Configuration -from apify._platform_event_manager import PlatformEventManager, SystemInfoEventData +from apify.events import ApifyEventManager +from apify.events._types import SystemInfoEventData if TYPE_CHECKING: from collections.abc import Callable @@ -26,7 +27,7 @@ async def test_lifecycle_local(caplog: pytest.LogCaptureFixture) -> None: caplog.set_level(logging.DEBUG, logger='apify') config = Configuration.get_global_configuration() - async with PlatformEventManager(config): + async with ApifyEventManager(config): pass assert len(caplog.records) == 1 @@ -40,7 +41,7 @@ async def test_lifecycle_local(caplog: pytest.LogCaptureFixture) -> None: async def test_event_handling_local() -> None: config = Configuration.get_global_configuration() - async with PlatformEventManager(config) as event_manager: + async with ApifyEventManager(config) as event_manager: event_calls = defaultdict(list) def on_event(event: Event, id: int | None = None) -> Callable: @@ -110,7 +111,7 @@ async def test_event_async_handling_local() -> None: dummy_system_info = Mock() config = Configuration.get_global_configuration() - async with PlatformEventManager(config) as event_manager: + async with ApifyEventManager(config) as event_manager: event_calls = [] async def event_handler(data: Any) -> None: @@ -129,7 +130,7 @@ async def event_handler(data: Any) -> None: async def test_lifecycle_on_platform_without_websocket(monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv(ActorEnvVars.EVENTS_WEBSOCKET_URL, 'ws://localhost:56565') - event_manager = PlatformEventManager(Configuration.get_global_configuration()) + event_manager = ApifyEventManager(Configuration.get_global_configuration()) with pytest.raises(RuntimeError, match='Error connecting to platform events websocket!'): async with event_manager: @@ -152,7 +153,7 @@ async def handler(websocket: websockets.asyncio.server.ServerConnection) -> None port: int = ws_server.sockets[0].getsockname()[1] # type: ignore[index] monkeypatch.setenv(ActorEnvVars.EVENTS_WEBSOCKET_URL, f'ws://localhost:{port}') - async with PlatformEventManager(Configuration.get_global_configuration()): + async with ApifyEventManager(Configuration.get_global_configuration()): assert len(connected_ws_clients) == 1 @@ -191,7 +192,7 @@ async def send_platform_event(event_name: Event, data: Any = None) -> None: } SystemInfoEventData.model_validate(dummy_system_info) - 
async with PlatformEventManager(Configuration.get_global_configuration()) as event_manager: + async with ApifyEventManager(Configuration.get_global_configuration()) as event_manager: event_calls = [] def listener(data: Any) -> None: diff --git a/tests/unit/scrapy/requests/test_to_scrapy_request.py b/tests/unit/scrapy/requests/test_to_scrapy_request.py index d1481a98..2b8f0ab7 100644 --- a/tests/unit/scrapy/requests/test_to_scrapy_request.py +++ b/tests/unit/scrapy/requests/test_to_scrapy_request.py @@ -5,9 +5,9 @@ import pytest from scrapy import Request, Spider -from crawlee import Request as CrawleeRequest from crawlee._types import HttpHeaders +from apify import Request as ApifyRequest from apify.scrapy.requests import to_scrapy_request @@ -23,7 +23,7 @@ def spider() -> DummySpider: def test_without_reconstruction(spider: Spider) -> None: # Without reconstruction of encoded Scrapy request - apify_request = CrawleeRequest( + apify_request = ApifyRequest( url='https://example.com', method='GET', unique_key='https://example.com', @@ -42,7 +42,7 @@ def test_without_reconstruction(spider: Spider) -> None: def test_without_reconstruction_with_optional_fields(spider: Spider) -> None: # Without reconstruction of encoded Scrapy request - apify_request = CrawleeRequest( + apify_request = ApifyRequest( url='https://crawlee.dev', method='GET', unique_key='https://crawlee.dev', @@ -67,7 +67,7 @@ def test_without_reconstruction_with_optional_fields(spider: Spider) -> None: def test_with_reconstruction(spider: Spider) -> None: # With reconstruction of encoded Scrapy request - apify_request = CrawleeRequest( + apify_request = ApifyRequest( url='https://apify.com', method='GET', id='fvwscO2UJLdr10B', @@ -89,7 +89,7 @@ def test_with_reconstruction(spider: Spider) -> None: def test_with_reconstruction_with_optional_fields(spider: Spider) -> None: # With reconstruction of encoded Scrapy request - apify_request = CrawleeRequest( + apify_request = ApifyRequest( url='https://apify.com', method='GET', id='fvwscO2UJLdr10B', @@ -116,7 +116,7 @@ def test_with_reconstruction_with_optional_fields(spider: Spider) -> None: def test_invalid_request_for_reconstruction(spider: Spider) -> None: - apify_request = CrawleeRequest( + apify_request = ApifyRequest( url='https://example.com', method='GET', id='invalid123', diff --git a/tests/unit/storage_clients/__init__.py b/tests/unit/storage_clients/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/storage_clients/test_file_system.py b/tests/unit/storage_clients/test_file_system.py new file mode 100644 index 00000000..c14e9813 --- /dev/null +++ b/tests/unit/storage_clients/test_file_system.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +import asyncio + +from crawlee._consts import METADATA_FILENAME + +from apify import Configuration +from apify.storage_clients._file_system import ApifyFileSystemKeyValueStoreClient + + +async def test_purge_preserves_input_file_and_metadata() -> None: + """Test that purge() preserves INPUT.json and metadata files but removes other files.""" + # Get the global configuration (storage directory is set by test fixtures) + configuration = Configuration.get_global_configuration() + + kvs_storage_client = await ApifyFileSystemKeyValueStoreClient.open( + id=None, + name='test-kvs', + configuration=configuration, + ) + + # Create some test files in the KVS directory + kvs_path = kvs_storage_client.path_to_kvs + + # Create various files + input_file = kvs_path / f'{configuration.input_key}.json' + 
metadata_file = kvs_path / METADATA_FILENAME + regular_file1 = kvs_path / 'regular_file1.json' + regular_file2 = kvs_path / 'another_file.txt' + + # Write content to files + await asyncio.to_thread(input_file.write_text, '{"test": "input"}') + await asyncio.to_thread(regular_file1.write_text, '{"test": "data1"}') + await asyncio.to_thread(regular_file2.write_text, 'some text content') + + # Verify all files exist before purge + assert input_file.exists() + assert metadata_file.exists() # Should exist from client creation + assert regular_file1.exists() + assert regular_file2.exists() + + # Purge the key-value store + await kvs_storage_client.purge() + + # Verify INPUT.json and metadata are preserved + assert input_file.exists(), f'{configuration.input_key} should be preserved during purge' + assert metadata_file.exists(), f'{METADATA_FILENAME} should be preserved during purge' + + # Verify other files are deleted + assert not regular_file1.exists(), 'Regular files should be deleted during purge' + assert not regular_file2.exists(), 'Regular files should be deleted during purge' + + # Verify INPUT.json content is unchanged + input_content = await asyncio.to_thread(input_file.read_text) + assert input_content == '{"test": "input"}' diff --git a/uv.lock b/uv.lock index 89a9f5c7..3ca8d5c3 100644 --- a/uv.lock +++ b/uv.lock @@ -33,6 +33,7 @@ source = { editable = "." } dependencies = [ { name = "apify-client" }, { name = "apify-shared" }, + { name = "cachetools" }, { name = "crawlee" }, { name = "cryptography" }, { name = "httpx" }, @@ -64,6 +65,7 @@ dev = [ { name = "pytest-xdist" }, { name = "ruff" }, { name = "setuptools" }, + { name = "types-cachetools" }, { name = "uvicorn", extra = ["standard"] }, { name = "werkzeug" }, { name = "yarl" }, @@ -73,7 +75,8 @@ dev = [ requires-dist = [ { name = "apify-client", specifier = "<2.0.0" }, { name = "apify-shared", specifier = "<2.0.0" }, - { name = "crawlee", specifier = "~=0.6.0" }, + { name = "cachetools", specifier = ">=5.5.0" }, + { name = "crawlee", git = "https://github.com/apify/crawlee-python.git?rev=master" }, { name = "cryptography", specifier = ">=42.0.0" }, { name = "httpx", specifier = ">=0.27.0" }, { name = "lazy-object-proxy", specifier = "<1.11.0" }, @@ -101,6 +104,7 @@ dev = [ { name = "pytest-xdist", specifier = "~=3.8.0" }, { name = "ruff", specifier = "~=0.12.0" }, { name = "setuptools" }, + { name = "types-cachetools", specifier = ">=6.0.0.20250525" }, { name = "uvicorn", extras = ["standard"] }, { name = "werkzeug", specifier = "~=3.1.3" }, { name = "yarl", specifier = "~=1.20.0" }, @@ -121,22 +125,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6e/c2/c7a1568aec801aa84bbaf93ab390b6bd57e850be30443365370ca3a9ccdc/apify_client-1.12.1-py3-none-any.whl", hash = "sha256:0b331677697dfa1038d17154284fc0bad1b18ba52ab792beb53711af81eac30a", size = 83218, upload-time = "2025-07-30T09:07:04.513Z" }, ] -[[package]] -name = "apify-fingerprint-datapoints" -version = "0.0.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/10/43/f3d3aacd305b9c80b4e76e3a68ab787967bd1db73ee59cc2bfcb4fde9f9b/apify_fingerprint_datapoints-0.0.3.tar.gz", hash = "sha256:2d8c501562e2db745c2cca14cc05bc66a0e60251ae8f21f90bdbf8f647c8ffe2", size = 625384, upload-time = "2025-06-27T11:07:42.914Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/ec/b5451d7f3117e8315445cbb0f16a83987164c112e6ee04f5645337e70e61/apify_fingerprint_datapoints-0.0.3-py3-none-any.whl", hash = 
"sha256:4881883511bcce7797d9f11292b807c031ce8427bb8cf1c947ed92d53e868c92", size = 354690, upload-time = "2025-06-27T11:07:41.588Z" }, -] - [[package]] name = "apify-shared" -version = "1.5.0" +version = "1.4.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/63/3e/96de53973fa0704d9b99339fad1838b53d9340870bafc7a9a9f41a7d266f/apify_shared-1.5.0.tar.gz", hash = "sha256:1cba58f0144127f7b52cced426a6527e9722620e9fd1c4ddb6f9c8ce16db0ef1", size = 14639, upload-time = "2025-08-05T11:10:20.617Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/90/8c124864a372693a86c26efc38de27440a03bc69a18055399041dd18fa24/apify_shared-1.4.2.tar.gz", hash = "sha256:7190f2b7557b50b40acb32a1fcc783ea8a0fa58bf3cf33fc03e23de49f318b45", size = 13889, upload-time = "2025-08-01T07:38:54.625Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/87/fe6b3e7eec76e083ce54bb1b4a19b7dd8f6d3441a3a05e053af6607fcda4/apify_shared-1.5.0-py3-none-any.whl", hash = "sha256:46409a75140d25f3487da87adbf446390214e08cda79c2938aaee085e8f7f9dd", size = 13467, upload-time = "2025-08-05T11:10:19.187Z" }, + { url = "https://files.pythonhosted.org/packages/c2/d6/e3864ffe8886713aa5306d38b0e90237085d4951699a4be39adbcc4194e5/apify_shared-1.4.2-py3-none-any.whl", hash = "sha256:1958b843c4e16af0804b3f6ba886264091b54c15bf524606fafb55d20ed08fff", size = 12725, upload-time = "2025-08-01T07:38:53.556Z" }, ] [[package]] @@ -196,110 +191,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7b/14/4da7b12a9abc43a601c215cb5a3d176734578da109f0dbf0a832ed78be09/black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e", size = 194363, upload-time = "2023-12-22T23:06:14.278Z" }, ] -[[package]] -name = "brotli" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2f/c2/f9e977608bdf958650638c3f1e28f85a1b075f075ebbe77db8555463787b/Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724", size = 7372270, upload-time = "2023-09-07T14:05:41.643Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/3a/dbf4fb970c1019a57b5e492e1e0eae745d32e59ba4d6161ab5422b08eefe/Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752", size = 873045, upload-time = "2023-09-07T14:03:16.894Z" }, - { url = "https://files.pythonhosted.org/packages/dd/11/afc14026ea7f44bd6eb9316d800d439d092c8d508752055ce8d03086079a/Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9", size = 446218, upload-time = "2023-09-07T14:03:18.917Z" }, - { url = "https://files.pythonhosted.org/packages/36/83/7545a6e7729db43cb36c4287ae388d6885c85a86dd251768a47015dfde32/Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3", size = 2903872, upload-time = "2023-09-07T14:03:20.398Z" }, - { url = "https://files.pythonhosted.org/packages/32/23/35331c4d9391fcc0f29fd9bec2c76e4b4eeab769afbc4b11dd2e1098fb13/Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d", size = 2941254, upload-time = "2023-09-07T14:03:21.914Z" }, - { url = 
"https://files.pythonhosted.org/packages/3b/24/1671acb450c902edb64bd765d73603797c6c7280a9ada85a195f6b78c6e5/Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e", size = 2857293, upload-time = "2023-09-07T14:03:24Z" }, - { url = "https://files.pythonhosted.org/packages/d5/00/40f760cc27007912b327fe15bf6bfd8eaecbe451687f72a8abc587d503b3/Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da", size = 3002385, upload-time = "2023-09-07T14:03:26.248Z" }, - { url = "https://files.pythonhosted.org/packages/b8/cb/8aaa83f7a4caa131757668c0fb0c4b6384b09ffa77f2fba9570d87ab587d/Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80", size = 2911104, upload-time = "2023-09-07T14:03:27.849Z" }, - { url = "https://files.pythonhosted.org/packages/bc/c4/65456561d89d3c49f46b7fbeb8fe6e449f13bdc8ea7791832c5d476b2faf/Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d", size = 2809981, upload-time = "2023-09-07T14:03:29.92Z" }, - { url = "https://files.pythonhosted.org/packages/05/1b/cf49528437bae28abce5f6e059f0d0be6fecdcc1d3e33e7c54b3ca498425/Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0", size = 2935297, upload-time = "2023-09-07T14:03:32.035Z" }, - { url = "https://files.pythonhosted.org/packages/81/ff/190d4af610680bf0c5a09eb5d1eac6e99c7c8e216440f9c7cfd42b7adab5/Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e", size = 2930735, upload-time = "2023-09-07T14:03:33.801Z" }, - { url = "https://files.pythonhosted.org/packages/80/7d/f1abbc0c98f6e09abd3cad63ec34af17abc4c44f308a7a539010f79aae7a/Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c", size = 2933107, upload-time = "2024-10-18T12:32:09.016Z" }, - { url = "https://files.pythonhosted.org/packages/34/ce/5a5020ba48f2b5a4ad1c0522d095ad5847a0be508e7d7569c8630ce25062/Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1", size = 2845400, upload-time = "2024-10-18T12:32:11.134Z" }, - { url = "https://files.pythonhosted.org/packages/44/89/fa2c4355ab1eecf3994e5a0a7f5492c6ff81dfcb5f9ba7859bd534bb5c1a/Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2", size = 3031985, upload-time = "2024-10-18T12:32:12.813Z" }, - { url = "https://files.pythonhosted.org/packages/af/a4/79196b4a1674143d19dca400866b1a4d1a089040df7b93b88ebae81f3447/Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec", size = 2927099, upload-time = "2024-10-18T12:32:14.733Z" }, - { url = "https://files.pythonhosted.org/packages/e9/54/1c0278556a097f9651e657b873ab08f01b9a9ae4cac128ceb66427d7cd20/Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2", size = 333172, upload-time = "2023-09-07T14:03:35.212Z" }, - { url = 
"https://files.pythonhosted.org/packages/f7/65/b785722e941193fd8b571afd9edbec2a9b838ddec4375d8af33a50b8dab9/Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128", size = 357255, upload-time = "2023-09-07T14:03:36.447Z" }, - { url = "https://files.pythonhosted.org/packages/96/12/ad41e7fadd5db55459c4c401842b47f7fee51068f86dd2894dd0dcfc2d2a/Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc", size = 873068, upload-time = "2023-09-07T14:03:37.779Z" }, - { url = "https://files.pythonhosted.org/packages/95/4e/5afab7b2b4b61a84e9c75b17814198ce515343a44e2ed4488fac314cd0a9/Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6", size = 446244, upload-time = "2023-09-07T14:03:39.223Z" }, - { url = "https://files.pythonhosted.org/packages/9d/e6/f305eb61fb9a8580c525478a4a34c5ae1a9bcb12c3aee619114940bc513d/Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd", size = 2906500, upload-time = "2023-09-07T14:03:40.858Z" }, - { url = "https://files.pythonhosted.org/packages/3e/4f/af6846cfbc1550a3024e5d3775ede1e00474c40882c7bf5b37a43ca35e91/Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf", size = 2943950, upload-time = "2023-09-07T14:03:42.896Z" }, - { url = "https://files.pythonhosted.org/packages/b3/e7/ca2993c7682d8629b62630ebf0d1f3bb3d579e667ce8e7ca03a0a0576a2d/Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61", size = 2918527, upload-time = "2023-09-07T14:03:44.552Z" }, - { url = "https://files.pythonhosted.org/packages/b3/96/da98e7bedc4c51104d29cc61e5f449a502dd3dbc211944546a4cc65500d3/Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327", size = 2845489, upload-time = "2023-09-07T14:03:46.594Z" }, - { url = "https://files.pythonhosted.org/packages/e8/ef/ccbc16947d6ce943a7f57e1a40596c75859eeb6d279c6994eddd69615265/Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd", size = 2914080, upload-time = "2023-09-07T14:03:48.204Z" }, - { url = "https://files.pythonhosted.org/packages/80/d6/0bd38d758d1afa62a5524172f0b18626bb2392d717ff94806f741fcd5ee9/Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9", size = 2813051, upload-time = "2023-09-07T14:03:50.348Z" }, - { url = "https://files.pythonhosted.org/packages/14/56/48859dd5d129d7519e001f06dcfbb6e2cf6db92b2702c0c2ce7d97e086c1/Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265", size = 2938172, upload-time = "2023-09-07T14:03:52.395Z" }, - { url = "https://files.pythonhosted.org/packages/3d/77/a236d5f8cd9e9f4348da5acc75ab032ab1ab2c03cc8f430d24eea2672888/Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8", size = 2933023, upload-time = "2023-09-07T14:03:53.96Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/87/3b283efc0f5cb35f7f84c0c240b1e1a1003a5e47141a4881bf87c86d0ce2/Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f", size = 2935871, upload-time = "2024-10-18T12:32:16.688Z" }, - { url = "https://files.pythonhosted.org/packages/f3/eb/2be4cc3e2141dc1a43ad4ca1875a72088229de38c68e842746b342667b2a/Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757", size = 2847784, upload-time = "2024-10-18T12:32:18.459Z" }, - { url = "https://files.pythonhosted.org/packages/66/13/b58ddebfd35edde572ccefe6890cf7c493f0c319aad2a5badee134b4d8ec/Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0", size = 3034905, upload-time = "2024-10-18T12:32:20.192Z" }, - { url = "https://files.pythonhosted.org/packages/84/9c/bc96b6c7db824998a49ed3b38e441a2cae9234da6fa11f6ed17e8cf4f147/Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b", size = 2929467, upload-time = "2024-10-18T12:32:21.774Z" }, - { url = "https://files.pythonhosted.org/packages/e7/71/8f161dee223c7ff7fea9d44893fba953ce97cf2c3c33f78ba260a91bcff5/Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50", size = 333169, upload-time = "2023-09-07T14:03:55.404Z" }, - { url = "https://files.pythonhosted.org/packages/02/8a/fece0ee1057643cb2a5bbf59682de13f1725f8482b2c057d4e799d7ade75/Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1", size = 357253, upload-time = "2023-09-07T14:03:56.643Z" }, - { url = "https://files.pythonhosted.org/packages/5c/d0/5373ae13b93fe00095a58efcbce837fd470ca39f703a235d2a999baadfbc/Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28", size = 815693, upload-time = "2024-10-18T12:32:23.824Z" }, - { url = "https://files.pythonhosted.org/packages/8e/48/f6e1cdf86751300c288c1459724bfa6917a80e30dbfc326f92cea5d3683a/Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f", size = 422489, upload-time = "2024-10-18T12:32:25.641Z" }, - { url = "https://files.pythonhosted.org/packages/06/88/564958cedce636d0f1bed313381dfc4b4e3d3f6015a63dae6146e1b8c65c/Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409", size = 873081, upload-time = "2023-09-07T14:03:57.967Z" }, - { url = "https://files.pythonhosted.org/packages/58/79/b7026a8bb65da9a6bb7d14329fd2bd48d2b7f86d7329d5cc8ddc6a90526f/Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2", size = 446244, upload-time = "2023-09-07T14:03:59.319Z" }, - { url = "https://files.pythonhosted.org/packages/e5/18/c18c32ecea41b6c0004e15606e274006366fe19436b6adccc1ae7b2e50c2/Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451", size = 2906505, upload-time = "2023-09-07T14:04:01.327Z" }, - { url = 
"https://files.pythonhosted.org/packages/08/c8/69ec0496b1ada7569b62d85893d928e865df29b90736558d6c98c2031208/Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91", size = 2944152, upload-time = "2023-09-07T14:04:03.033Z" }, - { url = "https://files.pythonhosted.org/packages/ab/fb/0517cea182219d6768113a38167ef6d4eb157a033178cc938033a552ed6d/Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408", size = 2919252, upload-time = "2023-09-07T14:04:04.675Z" }, - { url = "https://files.pythonhosted.org/packages/c7/53/73a3431662e33ae61a5c80b1b9d2d18f58dfa910ae8dd696e57d39f1a2f5/Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0", size = 2845955, upload-time = "2023-09-07T14:04:06.585Z" }, - { url = "https://files.pythonhosted.org/packages/55/ac/bd280708d9c5ebdbf9de01459e625a3e3803cce0784f47d633562cf40e83/Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc", size = 2914304, upload-time = "2023-09-07T14:04:08.668Z" }, - { url = "https://files.pythonhosted.org/packages/76/58/5c391b41ecfc4527d2cc3350719b02e87cb424ef8ba2023fb662f9bf743c/Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180", size = 2814452, upload-time = "2023-09-07T14:04:10.736Z" }, - { url = "https://files.pythonhosted.org/packages/c7/4e/91b8256dfe99c407f174924b65a01f5305e303f486cc7a2e8a5d43c8bec3/Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248", size = 2938751, upload-time = "2023-09-07T14:04:12.875Z" }, - { url = "https://files.pythonhosted.org/packages/5a/a6/e2a39a5d3b412938362bbbeba5af904092bf3f95b867b4a3eb856104074e/Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966", size = 2933757, upload-time = "2023-09-07T14:04:14.551Z" }, - { url = "https://files.pythonhosted.org/packages/13/f0/358354786280a509482e0e77c1a5459e439766597d280f28cb097642fc26/Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9", size = 2936146, upload-time = "2024-10-18T12:32:27.257Z" }, - { url = "https://files.pythonhosted.org/packages/80/f7/daf538c1060d3a88266b80ecc1d1c98b79553b3f117a485653f17070ea2a/Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb", size = 2848055, upload-time = "2024-10-18T12:32:29.376Z" }, - { url = "https://files.pythonhosted.org/packages/ad/cf/0eaa0585c4077d3c2d1edf322d8e97aabf317941d3a72d7b3ad8bce004b0/Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111", size = 3035102, upload-time = "2024-10-18T12:32:31.371Z" }, - { url = "https://files.pythonhosted.org/packages/d8/63/1c1585b2aa554fe6dbce30f0c18bdbc877fa9a1bf5ff17677d9cca0ac122/Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839", size = 2930029, upload-time = "2024-10-18T12:32:33.293Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/3b/4e3fd1893eb3bbfef8e5a80d4508bec17a57bb92d586c85c12d28666bb13/Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0", size = 333276, upload-time = "2023-09-07T14:04:16.49Z" }, - { url = "https://files.pythonhosted.org/packages/3d/d5/942051b45a9e883b5b6e98c041698b1eb2012d25e5948c58d6bf85b1bb43/Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951", size = 357255, upload-time = "2023-09-07T14:04:17.83Z" }, - { url = "https://files.pythonhosted.org/packages/0a/9f/fb37bb8ffc52a8da37b1c03c459a8cd55df7a57bdccd8831d500e994a0ca/Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5", size = 815681, upload-time = "2024-10-18T12:32:34.942Z" }, - { url = "https://files.pythonhosted.org/packages/06/b3/dbd332a988586fefb0aa49c779f59f47cae76855c2d00f450364bb574cac/Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8", size = 422475, upload-time = "2024-10-18T12:32:36.485Z" }, - { url = "https://files.pythonhosted.org/packages/bb/80/6aaddc2f63dbcf2d93c2d204e49c11a9ec93a8c7c63261e2b4bd35198283/Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f", size = 2906173, upload-time = "2024-10-18T12:32:37.978Z" }, - { url = "https://files.pythonhosted.org/packages/ea/1d/e6ca79c96ff5b641df6097d299347507d39a9604bde8915e76bf026d6c77/Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648", size = 2943803, upload-time = "2024-10-18T12:32:39.606Z" }, - { url = "https://files.pythonhosted.org/packages/ac/a3/d98d2472e0130b7dd3acdbb7f390d478123dbf62b7d32bda5c830a96116d/Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0", size = 2918946, upload-time = "2024-10-18T12:32:41.679Z" }, - { url = "https://files.pythonhosted.org/packages/c4/a5/c69e6d272aee3e1423ed005d8915a7eaa0384c7de503da987f2d224d0721/Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089", size = 2845707, upload-time = "2024-10-18T12:32:43.478Z" }, - { url = "https://files.pythonhosted.org/packages/58/9f/4149d38b52725afa39067350696c09526de0125ebfbaab5acc5af28b42ea/Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368", size = 2936231, upload-time = "2024-10-18T12:32:45.224Z" }, - { url = "https://files.pythonhosted.org/packages/5a/5a/145de884285611838a16bebfdb060c231c52b8f84dfbe52b852a15780386/Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c", size = 2848157, upload-time = "2024-10-18T12:32:46.894Z" }, - { url = "https://files.pythonhosted.org/packages/50/ae/408b6bfb8525dadebd3b3dd5b19d631da4f7d46420321db44cd99dcf2f2c/Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284", size = 3035122, upload-time = "2024-10-18T12:32:48.844Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/85/a94e5cfaa0ca449d8f91c3d6f78313ebf919a0dbd55a100c711c6e9655bc/Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7", size = 2930206, upload-time = "2024-10-18T12:32:51.198Z" }, - { url = "https://files.pythonhosted.org/packages/c2/f0/a61d9262cd01351df22e57ad7c34f66794709acab13f34be2675f45bf89d/Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0", size = 333804, upload-time = "2024-10-18T12:32:52.661Z" }, - { url = "https://files.pythonhosted.org/packages/7e/c1/ec214e9c94000d1c1974ec67ced1c970c148aa6b8d8373066123fc3dbf06/Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b", size = 358517, upload-time = "2024-10-18T12:32:54.066Z" }, -] - -[[package]] -name = "brotlicffi" -version = "1.1.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/95/9d/70caa61192f570fcf0352766331b735afa931b4c6bc9a348a0925cc13288/brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13", size = 465192, upload-time = "2023-09-14T14:22:40.707Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/11/7b96009d3dcc2c931e828ce1e157f03824a69fb728d06bfd7b2fc6f93718/brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851", size = 453786, upload-time = "2023-09-14T14:21:57.72Z" }, - { url = "https://files.pythonhosted.org/packages/d6/e6/a8f46f4a4ee7856fbd6ac0c6fb0dc65ed181ba46cd77875b8d9bbe494d9e/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b", size = 2911165, upload-time = "2023-09-14T14:21:59.613Z" }, - { url = "https://files.pythonhosted.org/packages/be/20/201559dff14e83ba345a5ec03335607e47467b6633c210607e693aefac40/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814", size = 2927895, upload-time = "2023-09-14T14:22:01.22Z" }, - { url = "https://files.pythonhosted.org/packages/cd/15/695b1409264143be3c933f708a3f81d53c4a1e1ebbc06f46331decbf6563/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820", size = 2851834, upload-time = "2023-09-14T14:22:03.571Z" }, - { url = "https://files.pythonhosted.org/packages/b4/40/b961a702463b6005baf952794c2e9e0099bde657d0d7e007f923883b907f/brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb", size = 341731, upload-time = "2023-09-14T14:22:05.74Z" }, - { url = "https://files.pythonhosted.org/packages/1c/fa/5408a03c041114ceab628ce21766a4ea882aa6f6f0a800e04ee3a30ec6b9/brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613", size = 366783, upload-time = "2023-09-14T14:22:07.096Z" }, - { url = "https://files.pythonhosted.org/packages/e5/3b/bd4f3d2bcf2306ae66b0346f5b42af1962480b200096ffc7abc3bd130eca/brotlicffi-1.1.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:2e4aeb0bd2540cb91b069dbdd54d458da8c4334ceaf2d25df2f4af576d6766ca", size = 397397, upload-time = "2023-09-14T14:22:08.519Z" }, - { url = "https://files.pythonhosted.org/packages/54/10/1fd57864449360852c535c2381ee7120ba8f390aa3869df967c44ca7eba1/brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b7b0033b0d37bb33009fb2fef73310e432e76f688af76c156b3594389d81391", size = 379698, upload-time = "2023-09-14T14:22:10.52Z" }, - { url = "https://files.pythonhosted.org/packages/e5/95/15aa422aa6450e6556e54a5fd1650ff59f470aed77ac739aa90ab63dc611/brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54a07bb2374a1eba8ebb52b6fafffa2afd3c4df85ddd38fcc0511f2bb387c2a8", size = 378635, upload-time = "2023-09-14T14:22:11.982Z" }, - { url = "https://files.pythonhosted.org/packages/6c/a7/f254e13b2cb43337d6d99a4ec10394c134e41bfda8a2eff15b75627f4a3d/brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7901a7dc4b88f1c1475de59ae9be59799db1007b7d059817948d8e4f12e24e35", size = 385719, upload-time = "2023-09-14T14:22:13.483Z" }, - { url = "https://files.pythonhosted.org/packages/72/a9/0971251c4427c14b2a827dba3d910d4d3330dabf23d4278bf6d06a978847/brotlicffi-1.1.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce01c7316aebc7fce59da734286148b1d1b9455f89cf2c8a4dfce7d41db55c2d", size = 361760, upload-time = "2023-09-14T14:22:14.767Z" }, -] - -[[package]] -name = "browserforge" -version = "1.2.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/df/5c/fe4d8cc5d5e61a5b1585190bba19d25bb76c45fdfe9c7bf264f5301fcf33/browserforge-1.2.3.tar.gz", hash = "sha256:d5bec6dffd4748b30fbac9f9c1ef33b26c01a23185240bf90011843e174b7ecc", size = 38072, upload-time = "2025-01-29T09:45:48.711Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/53/c60eb5bd26cf8689e361031bebc431437bc988555e80ba52d48c12c1d866/browserforge-1.2.3-py3-none-any.whl", hash = "sha256:a6c71ed4688b2f1b0bee757ca82ddad0007cbba68a71eca66ca607dde382f132", size = 39626, upload-time = "2025-01-29T09:45:47.531Z" }, -] - [[package]] name = "build" version = "1.3.0" @@ -583,15 +474,12 @@ toml = [ [[package]] name = "crawlee" -version = "0.6.12" -source = { registry = "https://pypi.org/simple" } +version = "0.6.13" +source = { git = "https://github.com/apify/crawlee-python.git?rev=master#55a763fe12e8bf5ccd0a70c455e00e3bc2ced279" } dependencies = [ - { name = "apify-fingerprint-datapoints" }, - { name = "browserforge" }, { name = "cachetools" }, { name = "colorama" }, - { name = "eval-type-backport" }, - { name = "httpx", extra = ["brotli", "http2", "zstd"] }, + { name = "impit" }, { name = "more-itertools" }, { name = "protego" }, { name = "psutil" }, @@ -604,10 +492,6 @@ dependencies = [ { name = "typing-extensions" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/56/3f/e8321d2339bf539f2c0d634e82900795eb810415bb25d0519b852ec6dc9e/crawlee-0.6.12.tar.gz", hash = "sha256:ab1785c1b3f71ebe3af84abe0a74b9e1de0f6516c9fac94a9a0f2df1efcb1387", size = 24156909, upload-time = "2025-07-30T11:45:49.563Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/f8/baf96096c5e283eca7e7424c81e47c605c9937ca45252d69eb317e4ed7d3/crawlee-0.6.12-py3-none-any.whl", hash = "sha256:4969ee0139550153187c3a31e70ba45cfd708aca007cbc21e43b39b9b50e74d3", size = 
263738, upload-time = "2025-07-30T11:45:46.572Z" }, -] [package.optional-dependencies] parsel = [ @@ -616,49 +500,49 @@ parsel = [ [[package]] name = "cryptography" -version = "45.0.6" +version = "45.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d6/0d/d13399c94234ee8f3df384819dc67e0c5ce215fb751d567a55a1f4b028c7/cryptography-45.0.6.tar.gz", hash = "sha256:5c966c732cf6e4a276ce83b6e4c729edda2df6929083a952cc7da973c539c719", size = 744949, upload-time = "2025-08-05T23:59:27.93Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/29/2793d178d0eda1ca4a09a7c4e09a5185e75738cc6d526433e8663b460ea6/cryptography-45.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:048e7ad9e08cf4c0ab07ff7f36cc3115924e22e2266e034450a890d9e312dd74", size = 7042702, upload-time = "2025-08-05T23:58:23.464Z" }, - { url = "https://files.pythonhosted.org/packages/b3/b6/cabd07410f222f32c8d55486c464f432808abaa1f12af9afcbe8f2f19030/cryptography-45.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44647c5d796f5fc042bbc6d61307d04bf29bccb74d188f18051b635f20a9c75f", size = 4206483, upload-time = "2025-08-05T23:58:27.132Z" }, - { url = "https://files.pythonhosted.org/packages/8b/9e/f9c7d36a38b1cfeb1cc74849aabe9bf817990f7603ff6eb485e0d70e0b27/cryptography-45.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e40b80ecf35ec265c452eea0ba94c9587ca763e739b8e559c128d23bff7ebbbf", size = 4429679, upload-time = "2025-08-05T23:58:29.152Z" }, - { url = "https://files.pythonhosted.org/packages/9c/2a/4434c17eb32ef30b254b9e8b9830cee4e516f08b47fdd291c5b1255b8101/cryptography-45.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:00e8724bdad672d75e6f069b27970883179bd472cd24a63f6e620ca7e41cc0c5", size = 4210553, upload-time = "2025-08-05T23:58:30.596Z" }, - { url = "https://files.pythonhosted.org/packages/ef/1d/09a5df8e0c4b7970f5d1f3aff1b640df6d4be28a64cae970d56c6cf1c772/cryptography-45.0.6-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a3085d1b319d35296176af31c90338eeb2ddac8104661df79f80e1d9787b8b2", size = 3894499, upload-time = "2025-08-05T23:58:32.03Z" }, - { url = "https://files.pythonhosted.org/packages/79/62/120842ab20d9150a9d3a6bdc07fe2870384e82f5266d41c53b08a3a96b34/cryptography-45.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1b7fa6a1c1188c7ee32e47590d16a5a0646270921f8020efc9a511648e1b2e08", size = 4458484, upload-time = "2025-08-05T23:58:33.526Z" }, - { url = "https://files.pythonhosted.org/packages/fd/80/1bc3634d45ddfed0871bfba52cf8f1ad724761662a0c792b97a951fb1b30/cryptography-45.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:275ba5cc0d9e320cd70f8e7b96d9e59903c815ca579ab96c1e37278d231fc402", size = 4210281, upload-time = "2025-08-05T23:58:35.445Z" }, - { url = "https://files.pythonhosted.org/packages/7d/fe/ffb12c2d83d0ee625f124880a1f023b5878f79da92e64c37962bbbe35f3f/cryptography-45.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42", size = 4456890, upload-time = "2025-08-05T23:58:36.923Z" }, - { url = "https://files.pythonhosted.org/packages/8c/8e/b3f3fe0dc82c77a0deb5f493b23311e09193f2268b77196ec0f7a36e3f3e/cryptography-45.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee411a1b977f40bd075392c80c10b58025ee5c6b47a822a33c1198598a7a5f05", size = 4333247, 
upload-time = "2025-08-05T23:58:38.781Z" }, - { url = "https://files.pythonhosted.org/packages/b3/a6/c3ef2ab9e334da27a1d7b56af4a2417d77e7806b2e0f90d6267ce120d2e4/cryptography-45.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e2a21a8eda2d86bb604934b6b37691585bd095c1f788530c1fcefc53a82b3453", size = 4565045, upload-time = "2025-08-05T23:58:40.415Z" }, - { url = "https://files.pythonhosted.org/packages/31/c3/77722446b13fa71dddd820a5faab4ce6db49e7e0bf8312ef4192a3f78e2f/cryptography-45.0.6-cp311-abi3-win32.whl", hash = "sha256:d063341378d7ee9c91f9d23b431a3502fc8bfacd54ef0a27baa72a0843b29159", size = 2928923, upload-time = "2025-08-05T23:58:41.919Z" }, - { url = "https://files.pythonhosted.org/packages/38/63/a025c3225188a811b82932a4dcc8457a26c3729d81578ccecbcce2cb784e/cryptography-45.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:833dc32dfc1e39b7376a87b9a6a4288a10aae234631268486558920029b086ec", size = 3403805, upload-time = "2025-08-05T23:58:43.792Z" }, - { url = "https://files.pythonhosted.org/packages/5b/af/bcfbea93a30809f126d51c074ee0fac5bd9d57d068edf56c2a73abedbea4/cryptography-45.0.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:3436128a60a5e5490603ab2adbabc8763613f638513ffa7d311c900a8349a2a0", size = 7020111, upload-time = "2025-08-05T23:58:45.316Z" }, - { url = "https://files.pythonhosted.org/packages/98/c6/ea5173689e014f1a8470899cd5beeb358e22bb3cf5a876060f9d1ca78af4/cryptography-45.0.6-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0d9ef57b6768d9fa58e92f4947cea96ade1233c0e236db22ba44748ffedca394", size = 4198169, upload-time = "2025-08-05T23:58:47.121Z" }, - { url = "https://files.pythonhosted.org/packages/ba/73/b12995edc0c7e2311ffb57ebd3b351f6b268fed37d93bfc6f9856e01c473/cryptography-45.0.6-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea3c42f2016a5bbf71825537c2ad753f2870191134933196bee408aac397b3d9", size = 4421273, upload-time = "2025-08-05T23:58:48.557Z" }, - { url = "https://files.pythonhosted.org/packages/f7/6e/286894f6f71926bc0da67408c853dd9ba953f662dcb70993a59fd499f111/cryptography-45.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:20ae4906a13716139d6d762ceb3e0e7e110f7955f3bc3876e3a07f5daadec5f3", size = 4199211, upload-time = "2025-08-05T23:58:50.139Z" }, - { url = "https://files.pythonhosted.org/packages/de/34/a7f55e39b9623c5cb571d77a6a90387fe557908ffc44f6872f26ca8ae270/cryptography-45.0.6-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dac5ec199038b8e131365e2324c03d20e97fe214af051d20c49db129844e8b3", size = 3883732, upload-time = "2025-08-05T23:58:52.253Z" }, - { url = "https://files.pythonhosted.org/packages/f9/b9/c6d32edbcba0cd9f5df90f29ed46a65c4631c4fbe11187feb9169c6ff506/cryptography-45.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:18f878a34b90d688982e43f4b700408b478102dd58b3e39de21b5ebf6509c301", size = 4450655, upload-time = "2025-08-05T23:58:53.848Z" }, - { url = "https://files.pythonhosted.org/packages/77/2d/09b097adfdee0227cfd4c699b3375a842080f065bab9014248933497c3f9/cryptography-45.0.6-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5bd6020c80c5b2b2242d6c48487d7b85700f5e0038e67b29d706f98440d66eb5", size = 4198956, upload-time = "2025-08-05T23:58:55.209Z" }, - { url = "https://files.pythonhosted.org/packages/55/66/061ec6689207d54effdff535bbdf85cc380d32dd5377173085812565cf38/cryptography-45.0.6-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:eccddbd986e43014263eda489abbddfbc287af5cddfd690477993dbb31e31016", size = 4449859, upload-time = 
"2025-08-05T23:58:56.639Z" }, - { url = "https://files.pythonhosted.org/packages/41/ff/e7d5a2ad2d035e5a2af116e1a3adb4d8fcd0be92a18032917a089c6e5028/cryptography-45.0.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:550ae02148206beb722cfe4ef0933f9352bab26b087af00e48fdfb9ade35c5b3", size = 4320254, upload-time = "2025-08-05T23:58:58.833Z" }, - { url = "https://files.pythonhosted.org/packages/82/27/092d311af22095d288f4db89fcaebadfb2f28944f3d790a4cf51fe5ddaeb/cryptography-45.0.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b64e668fc3528e77efa51ca70fadcd6610e8ab231e3e06ae2bab3b31c2b8ed9", size = 4554815, upload-time = "2025-08-05T23:59:00.283Z" }, - { url = "https://files.pythonhosted.org/packages/7e/01/aa2f4940262d588a8fdf4edabe4cda45854d00ebc6eaac12568b3a491a16/cryptography-45.0.6-cp37-abi3-win32.whl", hash = "sha256:780c40fb751c7d2b0c6786ceee6b6f871e86e8718a8ff4bc35073ac353c7cd02", size = 2912147, upload-time = "2025-08-05T23:59:01.716Z" }, - { url = "https://files.pythonhosted.org/packages/0a/bc/16e0276078c2de3ceef6b5a34b965f4436215efac45313df90d55f0ba2d2/cryptography-45.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:20d15aed3ee522faac1a39fbfdfee25d17b1284bafd808e1640a74846d7c4d1b", size = 3390459, upload-time = "2025-08-05T23:59:03.358Z" }, - { url = "https://files.pythonhosted.org/packages/56/d2/4482d97c948c029be08cb29854a91bd2ae8da7eb9c4152461f1244dcea70/cryptography-45.0.6-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:705bb7c7ecc3d79a50f236adda12ca331c8e7ecfbea51edd931ce5a7a7c4f012", size = 3576812, upload-time = "2025-08-05T23:59:04.833Z" }, - { url = "https://files.pythonhosted.org/packages/ec/24/55fc238fcaa122855442604b8badb2d442367dfbd5a7ca4bb0bd346e263a/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:826b46dae41a1155a0c0e66fafba43d0ede1dc16570b95e40c4d83bfcf0a451d", size = 4141694, upload-time = "2025-08-05T23:59:06.66Z" }, - { url = "https://files.pythonhosted.org/packages/f9/7e/3ea4fa6fbe51baf3903806a0241c666b04c73d2358a3ecce09ebee8b9622/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cc4d66f5dc4dc37b89cfef1bd5044387f7a1f6f0abb490815628501909332d5d", size = 4375010, upload-time = "2025-08-05T23:59:08.14Z" }, - { url = "https://files.pythonhosted.org/packages/50/42/ec5a892d82d2a2c29f80fc19ced4ba669bca29f032faf6989609cff1f8dc/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f68f833a9d445cc49f01097d95c83a850795921b3f7cc6488731e69bde3288da", size = 4141377, upload-time = "2025-08-05T23:59:09.584Z" }, - { url = "https://files.pythonhosted.org/packages/e7/d7/246c4c973a22b9c2931999da953a2c19cae7c66b9154c2d62ffed811225e/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3b5bf5267e98661b9b888a9250d05b063220dfa917a8203744454573c7eb79db", size = 4374609, upload-time = "2025-08-05T23:59:11.923Z" }, - { url = "https://files.pythonhosted.org/packages/78/6d/c49ccf243f0a1b0781c2a8de8123ee552f0c8a417c6367a24d2ecb7c11b3/cryptography-45.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2384f2ab18d9be88a6e4f8972923405e2dbb8d3e16c6b43f15ca491d7831bd18", size = 3322156, upload-time = "2025-08-05T23:59:13.597Z" }, - { url = "https://files.pythonhosted.org/packages/61/69/c252de4ec047ba2f567ecb53149410219577d408c2aea9c989acae7eafce/cryptography-45.0.6-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fc022c1fa5acff6def2fc6d7819bbbd31ccddfe67d075331a65d9cfb28a20983", size = 3584669, upload-time = "2025-08-05T23:59:15.431Z" }, - { url = 
"https://files.pythonhosted.org/packages/e3/fe/deea71e9f310a31fe0a6bfee670955152128d309ea2d1c79e2a5ae0f0401/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3de77e4df42ac8d4e4d6cdb342d989803ad37707cf8f3fbf7b088c9cbdd46427", size = 4153022, upload-time = "2025-08-05T23:59:16.954Z" }, - { url = "https://files.pythonhosted.org/packages/60/45/a77452f5e49cb580feedba6606d66ae7b82c128947aa754533b3d1bd44b0/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:599c8d7df950aa68baa7e98f7b73f4f414c9f02d0e8104a30c0182a07732638b", size = 4386802, upload-time = "2025-08-05T23:59:18.55Z" }, - { url = "https://files.pythonhosted.org/packages/a3/b9/a2f747d2acd5e3075fdf5c145c7c3568895daaa38b3b0c960ef830db6cdc/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:31a2b9a10530a1cb04ffd6aa1cd4d3be9ed49f7d77a4dafe198f3b382f41545c", size = 4152706, upload-time = "2025-08-05T23:59:20.044Z" }, - { url = "https://files.pythonhosted.org/packages/81/ec/381b3e8d0685a3f3f304a382aa3dfce36af2d76467da0fd4bb21ddccc7b2/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e5b3dda1b00fb41da3af4c5ef3f922a200e33ee5ba0f0bc9ecf0b0c173958385", size = 4386740, upload-time = "2025-08-05T23:59:21.525Z" }, - { url = "https://files.pythonhosted.org/packages/0a/76/cf8d69da8d0b5ecb0db406f24a63a3f69ba5e791a11b782aeeefef27ccbb/cryptography-45.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:629127cfdcdc6806dfe234734d7cb8ac54edaf572148274fa377a7d3405b0043", size = 3331874, upload-time = "2025-08-05T23:59:23.017Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/95/1e/49527ac611af559665f71cbb8f92b332b5ec9c6fbc4e88b0f8e92f5e85df/cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a", size = 744903, upload-time = "2025-07-02T13:06:25.941Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/fb/09e28bc0c46d2c547085e60897fea96310574c70fb21cd58a730a45f3403/cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8", size = 7043092, upload-time = "2025-07-02T13:05:01.514Z" }, + { url = "https://files.pythonhosted.org/packages/b1/05/2194432935e29b91fb649f6149c1a4f9e6d3d9fc880919f4ad1bcc22641e/cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d", size = 4205926, upload-time = "2025-07-02T13:05:04.741Z" }, + { url = "https://files.pythonhosted.org/packages/07/8b/9ef5da82350175e32de245646b1884fc01124f53eb31164c77f95a08d682/cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5", size = 4429235, upload-time = "2025-07-02T13:05:07.084Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e1/c809f398adde1994ee53438912192d92a1d0fc0f2d7582659d9ef4c28b0c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57", size = 4209785, upload-time = "2025-07-02T13:05:09.321Z" }, + { url = "https://files.pythonhosted.org/packages/d0/8b/07eb6bd5acff58406c5e806eff34a124936f41a4fb52909ffa4d00815f8c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0", size = 3893050, upload-time = 
"2025-07-02T13:05:11.069Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ef/3333295ed58d900a13c92806b67e62f27876845a9a908c939f040887cca9/cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d", size = 4457379, upload-time = "2025-07-02T13:05:13.32Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9d/44080674dee514dbb82b21d6fa5d1055368f208304e2ab1828d85c9de8f4/cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9", size = 4209355, upload-time = "2025-07-02T13:05:15.017Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d8/0749f7d39f53f8258e5c18a93131919ac465ee1f9dccaf1b3f420235e0b5/cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27", size = 4456087, upload-time = "2025-07-02T13:05:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/09/d7/92acac187387bf08902b0bf0699816f08553927bdd6ba3654da0010289b4/cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e", size = 4332873, upload-time = "2025-07-02T13:05:18.743Z" }, + { url = "https://files.pythonhosted.org/packages/03/c2/840e0710da5106a7c3d4153c7215b2736151bba60bf4491bdb421df5056d/cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174", size = 4564651, upload-time = "2025-07-02T13:05:21.382Z" }, + { url = "https://files.pythonhosted.org/packages/2e/92/cc723dd6d71e9747a887b94eb3827825c6c24b9e6ce2bb33b847d31d5eaa/cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9", size = 2929050, upload-time = "2025-07-02T13:05:23.39Z" }, + { url = "https://files.pythonhosted.org/packages/1f/10/197da38a5911a48dd5389c043de4aec4b3c94cb836299b01253940788d78/cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63", size = 3403224, upload-time = "2025-07-02T13:05:25.202Z" }, + { url = "https://files.pythonhosted.org/packages/fe/2b/160ce8c2765e7a481ce57d55eba1546148583e7b6f85514472b1d151711d/cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8", size = 7017143, upload-time = "2025-07-02T13:05:27.229Z" }, + { url = "https://files.pythonhosted.org/packages/c2/e7/2187be2f871c0221a81f55ee3105d3cf3e273c0a0853651d7011eada0d7e/cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd", size = 4197780, upload-time = "2025-07-02T13:05:29.299Z" }, + { url = "https://files.pythonhosted.org/packages/b9/cf/84210c447c06104e6be9122661159ad4ce7a8190011669afceeaea150524/cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e", size = 4420091, upload-time = "2025-07-02T13:05:31.221Z" }, + { url = "https://files.pythonhosted.org/packages/3e/6a/cb8b5c8bb82fafffa23aeff8d3a39822593cee6e2f16c5ca5c2ecca344f7/cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0", size = 4198711, upload-time = "2025-07-02T13:05:33.062Z" }, + { 
url = "https://files.pythonhosted.org/packages/04/f7/36d2d69df69c94cbb2473871926daf0f01ad8e00fe3986ac3c1e8c4ca4b3/cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135", size = 3883299, upload-time = "2025-07-02T13:05:34.94Z" }, + { url = "https://files.pythonhosted.org/packages/82/c7/f0ea40f016de72f81288e9fe8d1f6748036cb5ba6118774317a3ffc6022d/cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7", size = 4450558, upload-time = "2025-07-02T13:05:37.288Z" }, + { url = "https://files.pythonhosted.org/packages/06/ae/94b504dc1a3cdf642d710407c62e86296f7da9e66f27ab12a1ee6fdf005b/cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42", size = 4198020, upload-time = "2025-07-02T13:05:39.102Z" }, + { url = "https://files.pythonhosted.org/packages/05/2b/aaf0adb845d5dabb43480f18f7ca72e94f92c280aa983ddbd0bcd6ecd037/cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492", size = 4449759, upload-time = "2025-07-02T13:05:41.398Z" }, + { url = "https://files.pythonhosted.org/packages/91/e4/f17e02066de63e0100a3a01b56f8f1016973a1d67551beaf585157a86b3f/cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0", size = 4319991, upload-time = "2025-07-02T13:05:43.64Z" }, + { url = "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", size = 4554189, upload-time = "2025-07-02T13:05:46.045Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ea/a78a0c38f4c8736287b71c2ea3799d173d5ce778c7d6e3c163a95a05ad2a/cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f", size = 2911769, upload-time = "2025-07-02T13:05:48.329Z" }, + { url = "https://files.pythonhosted.org/packages/79/b3/28ac139109d9005ad3f6b6f8976ffede6706a6478e21c889ce36c840918e/cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97", size = 3390016, upload-time = "2025-07-02T13:05:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8b/34394337abe4566848a2bd49b26bcd4b07fd466afd3e8cce4cb79a390869/cryptography-45.0.5-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:206210d03c1193f4e1ff681d22885181d47efa1ab3018766a7b32a7b3d6e6afd", size = 3575762, upload-time = "2025-07-02T13:05:53.166Z" }, + { url = "https://files.pythonhosted.org/packages/8b/5d/a19441c1e89afb0f173ac13178606ca6fab0d3bd3ebc29e9ed1318b507fc/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c648025b6840fe62e57107e0a25f604db740e728bd67da4f6f060f03017d5097", size = 4140906, upload-time = "2025-07-02T13:05:55.914Z" }, + { url = "https://files.pythonhosted.org/packages/4b/db/daceb259982a3c2da4e619f45b5bfdec0e922a23de213b2636e78ef0919b/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b8fa8b0a35a9982a3c60ec79905ba5bb090fc0b9addcfd3dc2dd04267e45f25e", size = 4374411, upload-time = "2025-07-02T13:05:57.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/35/5d06ad06402fc522c8bf7eab73422d05e789b4e38fe3206a85e3d6966c11/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:14d96584701a887763384f3c47f0ca7c1cce322aa1c31172680eb596b890ec30", size = 4140942, upload-time = "2025-07-02T13:06:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/020a5413347e44c382ef1f7f7e7a66817cd6273e3e6b5a72d18177b08b2f/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57c816dfbd1659a367831baca4b775b2a5b43c003daf52e9d57e1d30bc2e1b0e", size = 4374079, upload-time = "2025-07-02T13:06:02.043Z" }, + { url = "https://files.pythonhosted.org/packages/9b/c5/c0e07d84a9a2a8a0ed4f865e58f37c71af3eab7d5e094ff1b21f3f3af3bc/cryptography-45.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b9e38e0a83cd51e07f5a48ff9691cae95a79bea28fe4ded168a8e5c6c77e819d", size = 3321362, upload-time = "2025-07-02T13:06:04.463Z" }, + { url = "https://files.pythonhosted.org/packages/c0/71/9bdbcfd58d6ff5084687fe722c58ac718ebedbc98b9f8f93781354e6d286/cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e", size = 3587878, upload-time = "2025-07-02T13:06:06.339Z" }, + { url = "https://files.pythonhosted.org/packages/f0/63/83516cfb87f4a8756eaa4203f93b283fda23d210fc14e1e594bd5f20edb6/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6", size = 4152447, upload-time = "2025-07-02T13:06:08.345Z" }, + { url = "https://files.pythonhosted.org/packages/22/11/d2823d2a5a0bd5802b3565437add16f5c8ce1f0778bf3822f89ad2740a38/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18", size = 4386778, upload-time = "2025-07-02T13:06:10.263Z" }, + { url = "https://files.pythonhosted.org/packages/5f/38/6bf177ca6bce4fe14704ab3e93627c5b0ca05242261a2e43ef3168472540/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463", size = 4151627, upload-time = "2025-07-02T13:06:13.097Z" }, + { url = "https://files.pythonhosted.org/packages/38/6a/69fc67e5266bff68a91bcb81dff8fb0aba4d79a78521a08812048913e16f/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1", size = 4385593, upload-time = "2025-07-02T13:06:15.689Z" }, + { url = "https://files.pythonhosted.org/packages/f6/34/31a1604c9a9ade0fdab61eb48570e09a796f4d9836121266447b0eaf7feb/cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f", size = 3331106, upload-time = "2025-07-02T13:06:18.058Z" }, ] [[package]] @@ -786,15 +670,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f8/1a/25272fafd13c92a2e3b8e351127410b9ea5557324bfea3552388d65797fc/dycw_pytest_only-2.1.1-py3-none-any.whl", hash = "sha256:ea8fe48878dd95ad0ca804e549225cf3b7a1928eb188c22a284c1d17b48a7b89", size = 2413, upload-time = "2025-06-03T01:04:46.585Z" }, ] -[[package]] -name = "eval-type-backport" -version = "0.2.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/ea/8b0ac4469d4c347c6a385ff09dc3c048c2d021696664e26c7ee6791631b5/eval_type_backport-0.2.2.tar.gz", hash = 
"sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1", size = 9079, upload-time = "2024-12-21T20:09:46.005Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/31/55cd413eaccd39125368be33c46de24a1f639f2e12349b0361b4678f3915/eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a", size = 5830, upload-time = "2024-12-21T20:09:44.175Z" }, -] - [[package]] name = "exceptiongroup" version = "1.3.0" @@ -846,28 +721,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] -[[package]] -name = "h2" -version = "4.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "hpack" }, - { name = "hyperframe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f", size = 2150682, upload-time = "2025-02-02T07:43:51.815Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957, upload-time = "2025-02-01T11:02:26.481Z" }, -] - -[[package]] -name = "hpack" -version = "4.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" }, -] - [[package]] name = "httpcore" version = "1.0.9" @@ -932,27 +785,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] -[package.optional-dependencies] -brotli = [ - { name = "brotli", marker = "platform_python_implementation == 'CPython'" }, - { name = "brotlicffi", marker = "platform_python_implementation != 'CPython'" }, -] -http2 = [ - { name = "h2" }, -] -zstd = [ - { name = "zstandard" }, -] - -[[package]] -name = "hyperframe" -version = "6.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = 
"sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, -] - [[package]] name = "hyperlink" version = "21.0.0" @@ -983,6 +815,47 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "impit" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/84/82/da7f6ebec2ae2e4071c7f97d5d09710ec205eb7a5660674bf2b0e43969ad/impit-0.5.0.tar.gz", hash = "sha256:c1f27d046fcf53b1ad9f63897a666a4f32eb53763245b4c2047c826991675ba5", size = 87921, upload-time = "2025-07-30T11:51:42.266Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/07/717c47aae5be96bb9d622c26a676a3f88e6ba6846c0b590b9e1f15dcaac6/impit-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bb2a8befe3b5bd2d32b116f9a76b41699a1ecc64e53e9643adb98837bba0c32e", size = 3840719, upload-time = "2025-07-30T11:50:30.974Z" }, + { url = "https://files.pythonhosted.org/packages/d3/56/20843b4e913c691b69f8a86483c64d1b0c84c17a20588b53acffffa67616/impit-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f9bcd170ad3b53ea0d2c585efc7f2f23b7942c7e9b41a505d4bdc4a928580f6", size = 3667648, upload-time = "2025-07-30T11:50:33.062Z" }, + { url = "https://files.pythonhosted.org/packages/5c/f5/d77627559764f759c0eef1189ec6b7d62fea71889b84b41dd8359c31835f/impit-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bba63905ebcc625b63cdc6adce0bc8c80c95ad500b3fd02b8bb622fbb718beb", size = 6071606, upload-time = "2025-07-30T11:50:34.568Z" }, + { url = "https://files.pythonhosted.org/packages/a8/d1/6206195b8af11151eb4fe77e98113f4ec507ee70c2873e6a1f50620048f5/impit-0.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9b6fd9898d5983f353d806fab528e95305736b0eb67da5fb32ce0fade31cc80f", size = 6363184, upload-time = "2025-07-30T11:50:36.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/5f/682015b7f2017ef3d823d42ae66d614a948d74597b457030e129501f216f/impit-0.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:186c20ff24a2431b66674d405a3b8357e66553ce1de518568b136a2cd4aa0d39", size = 6223813, upload-time = "2025-07-30T11:50:38.367Z" }, + { url = "https://files.pythonhosted.org/packages/e8/51/3937cfc7357a1f70146bd4c61e012f219cfb86126f75cb8a6c3320c452a1/impit-0.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:095a003e6f88302f12720704cd2835435a9752b5b033d5263f6be5ee8880d434", size = 3876828, upload-time = "2025-07-30T11:50:40.216Z" }, + { url = "https://files.pythonhosted.org/packages/3a/e7/86ee335462a58590739ef44d851aeaffc131608582bbbb4b2b6dd6677eda/impit-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cbee5603b7d8da6a3f5f6fccba7f9c2b04813943db293b6fc6fff63d3e86686b", size = 3840617, upload-time = "2025-07-30T11:50:41.735Z" }, + { url = "https://files.pythonhosted.org/packages/df/e4/64b4f55fca0e63f03289c83beffe08b396a0c32015ef2fc28b8a8c09146f/impit-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c516f5bceb1757633c6291411fe8e255f81c7432f259da41e6408654b81dca6f", size = 3667696, upload-time = "2025-07-30T11:50:43.551Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/75/7186b5ce0e10c7a3995fb814a8e12772911180baf1ee7a4db55d558c1b02/impit-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e4516f5d9c48aa9278f227c9a902e79f4636f35398921998899071a1abc08f9", size = 6071710, upload-time = "2025-07-30T11:50:45.095Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ec/a0b2a60e16de567be1403a6025f6daf8f40dc29629f1a2e5c828469fb987/impit-0.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d06287ba0bf51abc16e0bd763a727e03f419accd263c34ded3e10be93c971ceb", size = 6362938, upload-time = "2025-07-30T11:50:46.917Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a5/e9b16dda32008bd2e6a93dba1d82ddad6abacddd4b0c79792c03f244d16e/impit-0.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:19d5e75d2e0c6a69c59cdfdc88da7fd9a72c23684fc88439240294740f2b7515", size = 6219401, upload-time = "2025-07-30T11:50:48.441Z" }, + { url = "https://files.pythonhosted.org/packages/08/13/875fb538d16f39eef8dc9c634a4ae352a49a2a106b2b7cfdde67cd67212e/impit-0.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:f265c72c5aa8557244f80a230bfd548ea992db0b86323c672a87f379ad716957", size = 3876742, upload-time = "2025-07-30T11:50:49.863Z" }, + { url = "https://files.pythonhosted.org/packages/22/b0/8b9406eab662743a57e57066411b38b60f4f6dca91c954b64adc695ec3b6/impit-0.5.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ad1337dc93a4bb5e4075975f3234a7c85caf4ec6973a79b77cdfeb0087382238", size = 3840111, upload-time = "2025-07-30T11:50:51.318Z" }, + { url = "https://files.pythonhosted.org/packages/1c/ad/9385cb1d04eed2531d0df0a4902064a7d6fb3857abed1a86489f0e723834/impit-0.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc40983a60a7ee5eca8bbc8a9dfc0dc7865b94d26e8c86e3d550d06b1bebf3d7", size = 3666505, upload-time = "2025-07-30T11:50:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/9d/08/dcd9a585f4f6b633dac11f295ef705974deeb98176e8c793350e777c8561/impit-0.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:527ace267d17254500695a0b177e7cfbfd842a860e3047cc93fe09fe009b33a6", size = 6071126, upload-time = "2025-07-30T11:50:54.666Z" }, + { url = "https://files.pythonhosted.org/packages/2f/c6/fbc7c826456220dd30888002d04d687163502f181baaf9d5165d45d8e221/impit-0.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:caa5f2c827d979a1d8e2badddac33c0f36b62646d29cb078090474c706097843", size = 6361672, upload-time = "2025-07-30T11:50:56.164Z" }, + { url = "https://files.pythonhosted.org/packages/90/2f/9db57fe1cd6b6cc7e2bac30e6f749d94ba8d3ff9108c0d2f72735fc68dc6/impit-0.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f3f07c460492a852e1081bc447dba59a63d2ea45abe82a1cbac745a402e2c9c3", size = 6218078, upload-time = "2025-07-30T11:50:57.773Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8f/2a6c06951ff52552a8a061a67ebdebc70c4719531072afd55a17227cc7b9/impit-0.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8dd7432d673bd6bf42a15d7919a9457c0cfd0eb0832fe2582298366d98fcc4ae", size = 3876072, upload-time = "2025-07-30T11:50:59.69Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ac/0e34d5760573a719ef92249757b9dd1436687ca88d7b29a959886ea0f116/impit-0.5.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:b38197943efe692aa77f18883e3022fa1fcd240da2f3b3ba5dee10b7bdf5e835", size = 3839934, upload-time = "2025-07-30T11:51:01.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/7a/3321a75bb82750f5f4e04c003b8179d9c3ff751e16705a2b227417d4c2e5/impit-0.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d923a236011da447211714e34d015e178611b7997f3401ab5f432ee66d5f7b69", size = 3666384, upload-time = "2025-07-30T11:51:02.685Z" }, + { url = "https://files.pythonhosted.org/packages/da/cc/762ce64c7a6d1603111aacae151712c98f35ca20ea671b760005f42998fe/impit-0.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45e2e4daeecf85f80d4d5b287d83ce06c690ef4ef1c178ebae2265d05e54ab7d", size = 6070912, upload-time = "2025-07-30T11:51:04.188Z" }, + { url = "https://files.pythonhosted.org/packages/81/95/e86e0a01da31e76a6c2beca2bd4506fc93cb33c46dede1a47c9e3e6c15bc/impit-0.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6c7069ad70671e4b3129eb9f3fe3f50658aafc7f0de50b45fce4b0768008237c", size = 6361875, upload-time = "2025-07-30T11:51:05.741Z" }, + { url = "https://files.pythonhosted.org/packages/ee/e6/9ba2c7c111cf3265c657f53de3fb498db6586c13fd262b59913429d468fc/impit-0.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b656f0e7c1707c2124ea836b29a7d3d475117537184f12314b61ed74dccc6004", size = 6217892, upload-time = "2025-07-30T11:51:07.706Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8d/5d31b830d0f142126cfbe3402eeb573261011334c7596d18a05a8b054741/impit-0.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:dcdb4b2235284912e0fe66b4e6d924609e360d95f9c9dd9bfeb252fcd183ef74", size = 3875933, upload-time = "2025-07-30T11:51:09.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/e8/0def9b6b2ef540274ae5ceeb6fff5a51334411c1a578d2f3e1b9b6d6f62f/impit-0.5.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd5b9a51a24b5cd5708b7ae52c0a65162132dce46799f7e051e319f7f3ac5c9", size = 6361971, upload-time = "2025-07-30T11:51:11.46Z" }, + { url = "https://files.pythonhosted.org/packages/6a/6f/c597c55e745b6793c171e699c9d24466aa5586b4cfac37d839c423669586/impit-0.5.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:b6a8afd89ea056ad1657b3242b3331859e3b87bf1b8b913d8ca5ac12259776c9", size = 6219369, upload-time = "2025-07-30T11:51:13.994Z" }, + { url = "https://files.pythonhosted.org/packages/95/c3/3d983e2327e68459c52ac2f8ac5b91885dbdd8601d96b43a2a3a7c2399a1/impit-0.5.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08a756e64a44197591d1f41b682baded2f4cc891946c03e5af21078186779cb2", size = 6071007, upload-time = "2025-07-30T11:51:16.669Z" }, + { url = "https://files.pythonhosted.org/packages/0e/91/98c4aa4d8036e6eb94d26015d6693c491667fda3ffd0cebe367a4039c8a3/impit-0.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be03977267b81708bdc54e98186f18138b65af6e8a8f0859f9483ee39b292208", size = 6072884, upload-time = "2025-07-30T11:51:27.402Z" }, + { url = "https://files.pythonhosted.org/packages/a9/a4/aa4338f95f4d63bfe5dc4d40c5e83b400188926f496769a2da34e211c039/impit-0.5.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ebd100ed3a1d2017003e5f4323ef9ccb1bef7f68a3c70eace44dc80258ba61b1", size = 6363121, upload-time = "2025-07-30T11:51:29.254Z" }, + { url = "https://files.pythonhosted.org/packages/70/ba/0b6ff62fdccb66eb13cc058b69a1df08017f8318ae5adf813fad7cdc42a3/impit-0.5.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:8ee9f944ffdd039665f282be6ccec9a0ccd2e204a574f498d1bdd91f51dc4c93", size = 6220904, upload-time = "2025-07-30T11:51:30.706Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/6d/4b280ea4ad6b3b2731efa5aeef2c85c8c26d8408f591e1d3e2d3748b38eb/impit-0.5.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bcf04de0e970beccf95f71e1535bc7585a65b24eb7cdbbb7d6f13f9eb4533e3", size = 6072722, upload-time = "2025-07-30T11:51:32.321Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f1/8887e407d6330e9c3c98886466db78256a0b27246b6ef14e5418c965442f/impit-0.5.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a2ca6a988ded989d5331787dbca1539eefdcee202a38c188ec04525f4cb708d4", size = 6363170, upload-time = "2025-07-30T11:51:33.815Z" }, + { url = "https://files.pythonhosted.org/packages/03/73/6318d23468759f473c3f438d701e0efae3feb9fc1865b56d2a201a22e61f/impit-0.5.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:62a7942295ea8656fdb881a47a0e6b16edfb0e2d4dd07ad0b6f8d928efc1db66", size = 6220787, upload-time = "2025-07-30T11:51:36.663Z" }, +] + [[package]] name = "importlib-metadata" version = "8.7.0" @@ -1743,15 +1616,16 @@ wheels = [ [[package]] name = "pydantic-settings" -version = "2.6.1" +version = "2.10.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, + { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b5/d4/9dfbe238f45ad8b168f5c96ee49a3df0598ce18a0795a983b419949ce65b/pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0", size = 75646, upload-time = "2024-11-01T11:00:05.17Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583, upload-time = "2025-06-24T13:26:46.841Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/f9/ff95fd7d760af42f647ea87f9b8a383d891cdb5e5dbd4613edaeb094252a/pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87", size = 28595, upload-time = "2024-11-01T11:00:02.64Z" }, + { url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" }, ] [[package]] @@ -2209,6 +2083,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/33/38da585b06978d262cc2b2b45bc57ee75f0ce5e0b4ef1cab1b86461e9298/typeapi-2.2.4-py3-none-any.whl", hash = "sha256:bd6d5e5907fa47e0303bf254e7cc8712d4be4eb26d7ffaedb67c9e7844c53bb8", size = 26387, upload-time = "2025-01-29T11:40:12.328Z" }, ] +[[package]] +name = "types-cachetools" +version = "6.1.0.20250717" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/14/e98ea3b3fda81787659268bbf09dec56961c39db060fdca74cb521df0515/types_cachetools-6.1.0.20250717.tar.gz", hash = "sha256:4acc8e25de9f5f84dd176ea81dcffa7cb24393869bb2e59e692dfd0139a1e66f", size = 9105, upload-time = "2025-07-17T03:20:48.482Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/bb/554208964e901e9e1992a7ea0bcab1590a4b2e94d20a9e6200826110ec89/types_cachetools-6.1.0.20250717-py3-none-any.whl", hash = "sha256:bba4b8d42262460d24e570097d2d9040e60311934603caa642efd971f3658ed0", size = 8940, upload-time 
= "2025-07-17T03:20:47.375Z" }, +] + [[package]] name = "typing-extensions" version = "4.14.1" @@ -2298,16 +2181,16 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.33.1" +version = "20.33.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/60/4f20960df6c7b363a18a55ab034c8f2bcd5d9770d1f94f9370ec104c1855/virtualenv-20.33.1.tar.gz", hash = "sha256:1b44478d9e261b3fb8baa5e74a0ca3bc0e05f21aa36167bf9cbf850e542765b8", size = 6082160, upload-time = "2025-08-05T16:10:55.605Z" } +sdist = { url = "https://files.pythonhosted.org/packages/db/2e/8a70dcbe8bf15213a08f9b0325ede04faca5d362922ae0d62ef0fa4b069d/virtualenv-20.33.0.tar.gz", hash = "sha256:47e0c0d2ef1801fce721708ccdf2a28b9403fa2307c3268aebd03225976f61d2", size = 6082069, upload-time = "2025-08-03T08:09:19.014Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/ff/ded57ac5ff40a09e6e198550bab075d780941e0b0f83cbeabd087c59383a/virtualenv-20.33.1-py3-none-any.whl", hash = "sha256:07c19bc66c11acab6a5958b815cbcee30891cd1c2ccf53785a28651a0d8d8a67", size = 6060362, upload-time = "2025-08-05T16:10:52.81Z" }, + { url = "https://files.pythonhosted.org/packages/43/87/b22cf40cdf7e2b2bf83f38a94d2c90c5ad6c304896e5a12d0c08a602eb59/virtualenv-20.33.0-py3-none-any.whl", hash = "sha256:106b6baa8ab1b526d5a9b71165c85c456fbd49b16976c88e2bc9352ee3bc5d3f", size = 6060205, upload-time = "2025-08-03T08:09:16.674Z" }, ] [[package]] @@ -2741,78 +2624,3 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/66/ac05b741c2129fdf668b85631d2268421c5cd1a9ff99be1674371139d665/zope.interface-7.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a71a5b541078d0ebe373a81a3b7e71432c61d12e660f1d67896ca62d9628045b", size = 264696, upload-time = "2024-11-28T08:48:41.161Z" }, { url = "https://files.pythonhosted.org/packages/0a/2f/1bccc6f4cc882662162a1158cda1a7f616add2ffe322b28c99cb031b4ffc/zope.interface-7.2-cp313-cp313-win_amd64.whl", hash = "sha256:4893395d5dd2ba655c38ceb13014fd65667740f09fa5bb01caa1e6284e48c0cd", size = 212472, upload-time = "2024-11-28T08:49:56.587Z" }, ] - -[[package]] -name = "zstandard" -version = "0.23.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/2ac0287b442160a89d726b17a9184a4c615bb5237db763791a7fd16d9df1/zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09", size = 681701, upload-time = "2024-07-15T00:18:06.141Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/55/bd0487e86679db1823fc9ee0d8c9c78ae2413d34c0b461193b5f4c31d22f/zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9", size = 788701, upload-time = "2024-07-15T00:13:27.351Z" }, - { url = "https://files.pythonhosted.org/packages/e1/8a/ccb516b684f3ad987dfee27570d635822e3038645b1a950c5e8022df1145/zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880", size = 633678, upload-time = "2024-07-15T00:13:30.24Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/89/75e633d0611c028e0d9af6df199423bf43f54bea5007e6718ab7132e234c/zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc", size = 4941098, upload-time = "2024-07-15T00:13:32.526Z" }, - { url = "https://files.pythonhosted.org/packages/4a/7a/bd7f6a21802de358b63f1ee636ab823711c25ce043a3e9f043b4fcb5ba32/zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573", size = 5308798, upload-time = "2024-07-15T00:13:34.925Z" }, - { url = "https://files.pythonhosted.org/packages/79/3b/775f851a4a65013e88ca559c8ae42ac1352db6fcd96b028d0df4d7d1d7b4/zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391", size = 5341840, upload-time = "2024-07-15T00:13:37.376Z" }, - { url = "https://files.pythonhosted.org/packages/09/4f/0cc49570141dd72d4d95dd6fcf09328d1b702c47a6ec12fbed3b8aed18a5/zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e", size = 5440337, upload-time = "2024-07-15T00:13:39.772Z" }, - { url = "https://files.pythonhosted.org/packages/e7/7c/aaa7cd27148bae2dc095191529c0570d16058c54c4597a7d118de4b21676/zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd", size = 4861182, upload-time = "2024-07-15T00:13:42.495Z" }, - { url = "https://files.pythonhosted.org/packages/ac/eb/4b58b5c071d177f7dc027129d20bd2a44161faca6592a67f8fcb0b88b3ae/zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4", size = 4932936, upload-time = "2024-07-15T00:13:44.234Z" }, - { url = "https://files.pythonhosted.org/packages/44/f9/21a5fb9bb7c9a274b05ad700a82ad22ce82f7ef0f485980a1e98ed6e8c5f/zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea", size = 5464705, upload-time = "2024-07-15T00:13:46.822Z" }, - { url = "https://files.pythonhosted.org/packages/49/74/b7b3e61db3f88632776b78b1db597af3f44c91ce17d533e14a25ce6a2816/zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2", size = 4857882, upload-time = "2024-07-15T00:13:49.297Z" }, - { url = "https://files.pythonhosted.org/packages/4a/7f/d8eb1cb123d8e4c541d4465167080bec88481ab54cd0b31eb4013ba04b95/zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9", size = 4697672, upload-time = "2024-07-15T00:13:51.447Z" }, - { url = "https://files.pythonhosted.org/packages/5e/05/f7dccdf3d121309b60342da454d3e706453a31073e2c4dac8e1581861e44/zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a", size = 5206043, upload-time = "2024-07-15T00:13:53.587Z" }, - { url = "https://files.pythonhosted.org/packages/86/9d/3677a02e172dccd8dd3a941307621c0cbd7691d77cb435ac3c75ab6a3105/zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0", size 
= 5667390, upload-time = "2024-07-15T00:13:56.137Z" }, - { url = "https://files.pythonhosted.org/packages/41/7e/0012a02458e74a7ba122cd9cafe491facc602c9a17f590367da369929498/zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c", size = 5198901, upload-time = "2024-07-15T00:13:58.584Z" }, - { url = "https://files.pythonhosted.org/packages/65/3a/8f715b97bd7bcfc7342d8adcd99a026cb2fb550e44866a3b6c348e1b0f02/zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813", size = 430596, upload-time = "2024-07-15T00:14:00.693Z" }, - { url = "https://files.pythonhosted.org/packages/19/b7/b2b9eca5e5a01111e4fe8a8ffb56bdcdf56b12448a24effe6cfe4a252034/zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4", size = 495498, upload-time = "2024-07-15T00:14:02.741Z" }, - { url = "https://files.pythonhosted.org/packages/9e/40/f67e7d2c25a0e2dc1744dd781110b0b60306657f8696cafb7ad7579469bd/zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e", size = 788699, upload-time = "2024-07-15T00:14:04.909Z" }, - { url = "https://files.pythonhosted.org/packages/e8/46/66d5b55f4d737dd6ab75851b224abf0afe5774976fe511a54d2eb9063a41/zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23", size = 633681, upload-time = "2024-07-15T00:14:13.99Z" }, - { url = "https://files.pythonhosted.org/packages/63/b6/677e65c095d8e12b66b8f862b069bcf1f1d781b9c9c6f12eb55000d57583/zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a", size = 4944328, upload-time = "2024-07-15T00:14:16.588Z" }, - { url = "https://files.pythonhosted.org/packages/59/cc/e76acb4c42afa05a9d20827116d1f9287e9c32b7ad58cc3af0721ce2b481/zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db", size = 5311955, upload-time = "2024-07-15T00:14:19.389Z" }, - { url = "https://files.pythonhosted.org/packages/78/e4/644b8075f18fc7f632130c32e8f36f6dc1b93065bf2dd87f03223b187f26/zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2", size = 5344944, upload-time = "2024-07-15T00:14:22.173Z" }, - { url = "https://files.pythonhosted.org/packages/76/3f/dbafccf19cfeca25bbabf6f2dd81796b7218f768ec400f043edc767015a6/zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca", size = 5442927, upload-time = "2024-07-15T00:14:24.825Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c3/d24a01a19b6733b9f218e94d1a87c477d523237e07f94899e1c10f6fd06c/zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c", size = 4864910, upload-time = "2024-07-15T00:14:26.982Z" }, - { url = "https://files.pythonhosted.org/packages/1c/a9/cf8f78ead4597264f7618d0875be01f9bc23c9d1d11afb6d225b867cb423/zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e", size = 4935544, upload-time = "2024-07-15T00:14:29.582Z" }, - { url = "https://files.pythonhosted.org/packages/2c/96/8af1e3731b67965fb995a940c04a2c20997a7b3b14826b9d1301cf160879/zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5", size = 5467094, upload-time = "2024-07-15T00:14:40.126Z" }, - { url = "https://files.pythonhosted.org/packages/ff/57/43ea9df642c636cb79f88a13ab07d92d88d3bfe3e550b55a25a07a26d878/zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48", size = 4860440, upload-time = "2024-07-15T00:14:42.786Z" }, - { url = "https://files.pythonhosted.org/packages/46/37/edb78f33c7f44f806525f27baa300341918fd4c4af9472fbc2c3094be2e8/zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c", size = 4700091, upload-time = "2024-07-15T00:14:45.184Z" }, - { url = "https://files.pythonhosted.org/packages/c1/f1/454ac3962671a754f3cb49242472df5c2cced4eb959ae203a377b45b1a3c/zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003", size = 5208682, upload-time = "2024-07-15T00:14:47.407Z" }, - { url = "https://files.pythonhosted.org/packages/85/b2/1734b0fff1634390b1b887202d557d2dd542de84a4c155c258cf75da4773/zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78", size = 5669707, upload-time = "2024-07-15T00:15:03.529Z" }, - { url = "https://files.pythonhosted.org/packages/52/5a/87d6971f0997c4b9b09c495bf92189fb63de86a83cadc4977dc19735f652/zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473", size = 5201792, upload-time = "2024-07-15T00:15:28.372Z" }, - { url = "https://files.pythonhosted.org/packages/79/02/6f6a42cc84459d399bd1a4e1adfc78d4dfe45e56d05b072008d10040e13b/zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160", size = 430586, upload-time = "2024-07-15T00:15:32.26Z" }, - { url = "https://files.pythonhosted.org/packages/be/a2/4272175d47c623ff78196f3c10e9dc7045c1b9caf3735bf041e65271eca4/zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0", size = 495420, upload-time = "2024-07-15T00:15:34.004Z" }, - { url = "https://files.pythonhosted.org/packages/7b/83/f23338c963bd9de687d47bf32efe9fd30164e722ba27fb59df33e6b1719b/zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094", size = 788713, upload-time = "2024-07-15T00:15:35.815Z" }, - { url = "https://files.pythonhosted.org/packages/5b/b3/1a028f6750fd9227ee0b937a278a434ab7f7fdc3066c3173f64366fe2466/zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8", size = 633459, upload-time = "2024-07-15T00:15:37.995Z" }, - { url = "https://files.pythonhosted.org/packages/26/af/36d89aae0c1f95a0a98e50711bc5d92c144939efc1f81a2fcd3e78d7f4c1/zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1", size = 4945707, 
upload-time = "2024-07-15T00:15:39.872Z" }, - { url = "https://files.pythonhosted.org/packages/cd/2e/2051f5c772f4dfc0aae3741d5fc72c3dcfe3aaeb461cc231668a4db1ce14/zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072", size = 5306545, upload-time = "2024-07-15T00:15:41.75Z" }, - { url = "https://files.pythonhosted.org/packages/0a/9e/a11c97b087f89cab030fa71206963090d2fecd8eb83e67bb8f3ffb84c024/zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20", size = 5337533, upload-time = "2024-07-15T00:15:44.114Z" }, - { url = "https://files.pythonhosted.org/packages/fc/79/edeb217c57fe1bf16d890aa91a1c2c96b28c07b46afed54a5dcf310c3f6f/zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373", size = 5436510, upload-time = "2024-07-15T00:15:46.509Z" }, - { url = "https://files.pythonhosted.org/packages/81/4f/c21383d97cb7a422ddf1ae824b53ce4b51063d0eeb2afa757eb40804a8ef/zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db", size = 4859973, upload-time = "2024-07-15T00:15:49.939Z" }, - { url = "https://files.pythonhosted.org/packages/ab/15/08d22e87753304405ccac8be2493a495f529edd81d39a0870621462276ef/zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772", size = 4936968, upload-time = "2024-07-15T00:15:52.025Z" }, - { url = "https://files.pythonhosted.org/packages/eb/fa/f3670a597949fe7dcf38119a39f7da49a8a84a6f0b1a2e46b2f71a0ab83f/zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105", size = 5467179, upload-time = "2024-07-15T00:15:54.971Z" }, - { url = "https://files.pythonhosted.org/packages/4e/a9/dad2ab22020211e380adc477a1dbf9f109b1f8d94c614944843e20dc2a99/zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba", size = 4848577, upload-time = "2024-07-15T00:15:57.634Z" }, - { url = "https://files.pythonhosted.org/packages/08/03/dd28b4484b0770f1e23478413e01bee476ae8227bbc81561f9c329e12564/zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd", size = 4693899, upload-time = "2024-07-15T00:16:00.811Z" }, - { url = "https://files.pythonhosted.org/packages/2b/64/3da7497eb635d025841e958bcd66a86117ae320c3b14b0ae86e9e8627518/zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a", size = 5199964, upload-time = "2024-07-15T00:16:03.669Z" }, - { url = "https://files.pythonhosted.org/packages/43/a4/d82decbab158a0e8a6ebb7fc98bc4d903266bce85b6e9aaedea1d288338c/zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90", size = 5655398, upload-time = "2024-07-15T00:16:06.694Z" }, - { url = "https://files.pythonhosted.org/packages/f2/61/ac78a1263bc83a5cf29e7458b77a568eda5a8f81980691bbc6eb6a0d45cc/zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35", size = 5191313, upload-time = "2024-07-15T00:16:09.758Z" }, - { url = "https://files.pythonhosted.org/packages/e7/54/967c478314e16af5baf849b6ee9d6ea724ae5b100eb506011f045d3d4e16/zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d", size = 430877, upload-time = "2024-07-15T00:16:11.758Z" }, - { url = "https://files.pythonhosted.org/packages/75/37/872d74bd7739639c4553bf94c84af7d54d8211b626b352bc57f0fd8d1e3f/zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b", size = 495595, upload-time = "2024-07-15T00:16:13.731Z" }, - { url = "https://files.pythonhosted.org/packages/80/f1/8386f3f7c10261fe85fbc2c012fdb3d4db793b921c9abcc995d8da1b7a80/zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9", size = 788975, upload-time = "2024-07-15T00:16:16.005Z" }, - { url = "https://files.pythonhosted.org/packages/16/e8/cbf01077550b3e5dc86089035ff8f6fbbb312bc0983757c2d1117ebba242/zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a", size = 633448, upload-time = "2024-07-15T00:16:17.897Z" }, - { url = "https://files.pythonhosted.org/packages/06/27/4a1b4c267c29a464a161aeb2589aff212b4db653a1d96bffe3598f3f0d22/zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2", size = 4945269, upload-time = "2024-07-15T00:16:20.136Z" }, - { url = "https://files.pythonhosted.org/packages/7c/64/d99261cc57afd9ae65b707e38045ed8269fbdae73544fd2e4a4d50d0ed83/zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5", size = 5306228, upload-time = "2024-07-15T00:16:23.398Z" }, - { url = "https://files.pythonhosted.org/packages/7a/cf/27b74c6f22541f0263016a0fd6369b1b7818941de639215c84e4e94b2a1c/zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f", size = 5336891, upload-time = "2024-07-15T00:16:26.391Z" }, - { url = "https://files.pythonhosted.org/packages/fa/18/89ac62eac46b69948bf35fcd90d37103f38722968e2981f752d69081ec4d/zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed", size = 5436310, upload-time = "2024-07-15T00:16:29.018Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a8/5ca5328ee568a873f5118d5b5f70d1f36c6387716efe2e369010289a5738/zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea", size = 4859912, upload-time = "2024-07-15T00:16:31.871Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ca/3781059c95fd0868658b1cf0440edd832b942f84ae60685d0cfdb808bca1/zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847", size = 4936946, upload-time = "2024-07-15T00:16:34.593Z" }, - { url = 
"https://files.pythonhosted.org/packages/ce/11/41a58986f809532742c2b832c53b74ba0e0a5dae7e8ab4642bf5876f35de/zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171", size = 5466994, upload-time = "2024-07-15T00:16:36.887Z" }, - { url = "https://files.pythonhosted.org/packages/83/e3/97d84fe95edd38d7053af05159465d298c8b20cebe9ccb3d26783faa9094/zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840", size = 4848681, upload-time = "2024-07-15T00:16:39.709Z" }, - { url = "https://files.pythonhosted.org/packages/6e/99/cb1e63e931de15c88af26085e3f2d9af9ce53ccafac73b6e48418fd5a6e6/zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690", size = 4694239, upload-time = "2024-07-15T00:16:41.83Z" }, - { url = "https://files.pythonhosted.org/packages/ab/50/b1e703016eebbc6501fc92f34db7b1c68e54e567ef39e6e59cf5fb6f2ec0/zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b", size = 5200149, upload-time = "2024-07-15T00:16:44.287Z" }, - { url = "https://files.pythonhosted.org/packages/aa/e0/932388630aaba70197c78bdb10cce2c91fae01a7e553b76ce85471aec690/zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057", size = 5655392, upload-time = "2024-07-15T00:16:46.423Z" }, - { url = "https://files.pythonhosted.org/packages/02/90/2633473864f67a15526324b007a9f96c96f56d5f32ef2a56cc12f9548723/zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33", size = 5191299, upload-time = "2024-07-15T00:16:49.053Z" }, - { url = "https://files.pythonhosted.org/packages/b0/4c/315ca5c32da7e2dc3455f3b2caee5c8c2246074a61aac6ec3378a97b7136/zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd", size = 430862, upload-time = "2024-07-15T00:16:51.003Z" }, - { url = "https://files.pythonhosted.org/packages/a2/bf/c6aaba098e2d04781e8f4f7c0ba3c7aa73d00e4c436bcc0cf059a66691d1/zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b", size = 495578, upload-time = "2024-07-15T00:16:53.135Z" }, -]