diff --git a/pyproject.toml b/pyproject.toml index ac437e85..85ef3c74 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,7 @@ keywords = [ dependencies = [ "apify-client>=2.0.0,<3.0.0", "apify-shared>=2.0.0,<3.0.0", - "crawlee==1.0.0rc1", + "crawlee==0.6.13b37", "cachetools>=5.5.0", "cryptography>=42.0.0", "impit>=0.5.3", diff --git a/src/apify/_actor.py b/src/apify/_actor.py index c9044117..eafd1889 100644 --- a/src/apify/_actor.py +++ b/src/apify/_actor.py @@ -401,6 +401,7 @@ async def open_dataset( self, *, id: str | None = None, + alias: str | None = None, name: str | None = None, force_cloud: bool = False, ) -> Dataset: @@ -411,10 +412,12 @@ async def open_dataset( the Apify cloud. Args: - id: ID of the dataset to be opened. If neither `id` nor `name` are provided, the method returns - the default dataset associated with the Actor run. - name: Name of the dataset to be opened. If neither `id` nor `name` are provided, the method returns - the default dataset associated with the Actor run. + id: The ID of the dataset to open. If provided, searches for existing dataset by ID. + Mutually exclusive with name and alias. + name: The name of the dataset to open (global scope, persists across runs). + Mutually exclusive with id and alias. + alias: The alias of the dataset to open (run scope, creates unnamed storage). + Mutually exclusive with id and name. force_cloud: If set to `True` then the Apify cloud storage is always used. This way it is possible to combine local and cloud storage. @@ -428,6 +431,7 @@ async def open_dataset( return await Dataset.open( id=id, + alias=alias, name=name, configuration=self._configuration, storage_client=storage_client, @@ -437,6 +441,7 @@ async def open_key_value_store( self, *, id: str | None = None, + alias: str | None = None, name: str | None = None, force_cloud: bool = False, ) -> KeyValueStore: @@ -446,10 +451,12 @@ async def open_key_value_store( and retrieved using a unique key. The actual data is stored either on a local filesystem or in the Apify cloud. Args: - id: ID of the key-value store to be opened. If neither `id` nor `name` are provided, the method returns - the default key-value store associated with the Actor run. - name: Name of the key-value store to be opened. If neither `id` nor `name` are provided, the method - returns the default key-value store associated with the Actor run. + id: The ID of the KVS to open. If provided, searches for existing KVS by ID. + Mutually exclusive with name and alias. + name: The name of the KVS to open (global scope, persists across runs). + Mutually exclusive with id and alias. + alias: The alias of the KVS to open (run scope, creates unnamed storage). + Mutually exclusive with id and name. force_cloud: If set to `True` then the Apify cloud storage is always used. This way it is possible to combine local and cloud storage. @@ -462,6 +469,7 @@ async def open_key_value_store( return await KeyValueStore.open( id=id, + alias=alias, name=name, configuration=self._configuration, storage_client=storage_client, @@ -471,6 +479,7 @@ async def open_request_queue( self, *, id: str | None = None, + alias: str | None = None, name: str | None = None, force_cloud: bool = False, ) -> RequestQueue: @@ -482,10 +491,12 @@ async def open_request_queue( crawling orders. Args: - id: ID of the request queue to be opened. If neither `id` nor `name` are provided, the method returns - the default request queue associated with the Actor run. - name: Name of the request queue to be opened. 
If neither `id` nor `name` are provided, the method returns - the default request queue associated with the Actor run. + id: The ID of the RQ to open. If provided, searches for existing RQ by ID. + Mutually exclusive with name and alias. + name: The name of the RQ to open (global scope, persists across runs). + Mutually exclusive with id and alias. + alias: The alias of the RQ to open (run scope, creates unnamed storage). + Mutually exclusive with id and name. force_cloud: If set to `True` then the Apify cloud storage is always used. This way it is possible to combine local and cloud storage. @@ -499,6 +510,7 @@ async def open_request_queue( return await RequestQueue.open( id=id, + alias=alias, name=name, configuration=self._configuration, storage_client=storage_client, diff --git a/src/apify/events/__init__.py b/src/apify/events/__init__.py index c50c4ab8..b4d9ea6c 100644 --- a/src/apify/events/__init__.py +++ b/src/apify/events/__init__.py @@ -1,5 +1,5 @@ -from crawlee.events import EventManager, LocalEventManager +from crawlee.events import Event, EventManager, LocalEventManager from ._apify_event_manager import ApifyEventManager -__all__ = ['ApifyEventManager', 'EventManager', 'LocalEventManager'] +__all__ = ['ApifyEventManager', 'Event', 'EventManager', 'LocalEventManager'] diff --git a/src/apify/storage_clients/_apify/_dataset_client.py b/src/apify/storage_clients/_apify/_dataset_client.py index 64613b21..bb13299a 100644 --- a/src/apify/storage_clients/_apify/_dataset_client.py +++ b/src/apify/storage_clients/_apify/_dataset_client.py @@ -12,6 +12,8 @@ from crawlee.storage_clients._base import DatasetClient from crawlee.storage_clients.models import DatasetItemsListPage, DatasetMetadata +from ._utils import resolve_alias_to_id, store_alias_mapping + if TYPE_CHECKING: from collections.abc import AsyncIterator @@ -66,6 +68,7 @@ async def open( *, id: str | None, name: str | None, + alias: str | None, configuration: Configuration, ) -> ApifyDatasetClient: """Open an Apify dataset client. @@ -74,22 +77,27 @@ async def open( It handles authentication, storage lookup/creation, and metadata retrieval. Args: - id: The ID of an existing dataset to open. If provided, the client will connect to this specific storage. - Cannot be used together with `name`. - name: The name of a dataset to get or create. If a storage with this name exists, it will be opened; - otherwise, a new one will be created. Cannot be used together with `id`. + id: The ID of the dataset to open. If provided, searches for existing dataset by ID. + Mutually exclusive with name and alias. + name: The name of the dataset to open (global scope, persists across runs). + Mutually exclusive with id and alias. + alias: The alias of the dataset to open (run scope, creates unnamed storage). + Mutually exclusive with id and name. configuration: The configuration object containing API credentials and settings. Must include a valid `token` and `api_base_url`. May also contain a `default_dataset_id` for fallback when neither - `id` nor `name` is provided. + `id`, `name`, nor `alias` is provided. Returns: An instance for the opened or created storage client. Raises: - ValueError: If the configuration is missing required fields (token, api_base_url), if both `id` and `name` - are provided, or if neither `id` nor `name` is provided and no default storage ID is available in - the configuration. 
+ ValueError: If the configuration is missing required fields (token, api_base_url), if more than one of + `id`, `name`, or `alias` is provided, or if none are provided and no default storage ID is available + in the configuration. """ + if sum(1 for param in [id, name, alias] if param is not None) > 1: + raise ValueError('Only one of "id", "name", or "alias" can be specified, not multiple.') + token = configuration.token if not token: raise ValueError(f'Apify storage client requires a valid token in Configuration (token={token}).') @@ -115,27 +123,35 @@ async def open( ) apify_datasets_client = apify_client_async.datasets() - # If both id and name are provided, raise an error. - if id and name: - raise ValueError('Only one of "id" or "name" can be specified, not both.') + # Normalize 'default' alias to None + alias = None if alias == 'default' else alias - # If id is provided, get the storage by ID. - if id and name is None: - apify_dataset_client = apify_client_async.dataset(dataset_id=id) + # Handle alias resolution + if alias: + # Try to resolve alias to existing storage ID + resolved_id = await resolve_alias_to_id(alias, 'dataset', configuration) + if resolved_id: + id = resolved_id + else: + # Create a new storage and store the alias mapping + new_storage_metadata = DatasetMetadata.model_validate( + await apify_datasets_client.get_or_create(), + ) + id = new_storage_metadata.id + await store_alias_mapping(alias, 'dataset', id, configuration) # If name is provided, get or create the storage by name. - if name and id is None: + elif name: id = DatasetMetadata.model_validate( await apify_datasets_client.get_or_create(name=name), ).id - apify_dataset_client = apify_client_async.dataset(dataset_id=id) - # If both id and name are None, try to get the default storage ID from environment variables. - # The default storage ID environment variable is set by the Apify platform. It also contains - # a new storage ID after Actor's reboot or migration. - if id is None and name is None: + # If none are provided, try to get the default storage ID from environment variables. + elif id is None: id = configuration.default_dataset_id - apify_dataset_client = apify_client_async.dataset(dataset_id=id) + + # Now create the client for the determined ID + apify_dataset_client = apify_client_async.dataset(dataset_id=id) # Fetch its metadata. metadata = await apify_dataset_client.get() @@ -150,7 +166,7 @@ async def open( # Verify that the storage exists by fetching its metadata again. 
metadata = await apify_dataset_client.get() if metadata is None: - raise ValueError(f'Opening dataset with id={id} and name={name} failed.') + raise ValueError(f'Opening dataset with id={id}, name={name}, and alias={alias} failed.') return cls( api_client=apify_dataset_client, diff --git a/src/apify/storage_clients/_apify/_key_value_store_client.py b/src/apify/storage_clients/_apify/_key_value_store_client.py index fb841320..0237a338 100644 --- a/src/apify/storage_clients/_apify/_key_value_store_client.py +++ b/src/apify/storage_clients/_apify/_key_value_store_client.py @@ -12,6 +12,7 @@ from crawlee.storage_clients.models import KeyValueStoreRecord, KeyValueStoreRecordMetadata from ._models import ApifyKeyValueStoreMetadata, KeyValueStoreListKeysPage +from ._utils import resolve_alias_to_id, store_alias_mapping from apify._crypto import create_hmac_signature if TYPE_CHECKING: @@ -58,6 +59,7 @@ async def open( *, id: str | None, name: str | None, + alias: str | None, configuration: Configuration, ) -> ApifyKeyValueStoreClient: """Open an Apify key-value store client. @@ -66,22 +68,27 @@ async def open( It handles authentication, storage lookup/creation, and metadata retrieval. Args: - id: The ID of an existing key-value store to open. If provided, the client will connect to this specific - storage. Cannot be used together with `name`. - name: The name of a key-value store to get or create. If a storage with this name exists, it will be - opened; otherwise, a new one will be created. Cannot be used together with `id`. + id: The ID of the KVS to open. If provided, searches for existing KVS by ID. + Mutually exclusive with name and alias. + name: The name of the KVS to open (global scope, persists across runs). + Mutually exclusive with id and alias. + alias: The alias of the KVS to open (run scope, creates unnamed storage). + Mutually exclusive with id and name. configuration: The configuration object containing API credentials and settings. Must include a valid `token` and `api_base_url`. May also contain a `default_key_value_store_id` for fallback when - neither `id` nor `name` is provided. + neither `id`, `name`, nor `alias` is provided. Returns: An instance for the opened or created storage client. Raises: - ValueError: If the configuration is missing required fields (token, api_base_url), if both `id` and `name` - are provided, or if neither `id` nor `name` is provided and no default storage ID is available + ValueError: If the configuration is missing required fields (token, api_base_url), if more than one of + `id`, `name`, or `alias` is provided, or if none are provided and no default storage ID is available in the configuration. """ + if sum(1 for param in [id, name, alias] if param is not None) > 1: + raise ValueError('Only one of "id", "name", or "alias" can be specified, not multiple.') + token = configuration.token if not token: raise ValueError(f'Apify storage client requires a valid token in Configuration (token={token}).') @@ -107,27 +114,35 @@ async def open( ) apify_kvss_client = apify_client_async.key_value_stores() - # If both id and name are provided, raise an error. - if id and name: - raise ValueError('Only one of "id" or "name" can be specified, not both.') - - # If id is provided, get the storage by ID. 
- if id and name is None: - apify_kvs_client = apify_client_async.key_value_store(key_value_store_id=id) + # Normalize 'default' alias to None + alias = None if alias == 'default' else alias + + # Handle alias resolution + if alias: + # Try to resolve alias to existing storage ID + resolved_id = await resolve_alias_to_id(alias, 'kvs', configuration) + if resolved_id: + id = resolved_id + else: + # Create a new storage and store the alias mapping + new_storage_metadata = ApifyKeyValueStoreMetadata.model_validate( + await apify_kvss_client.get_or_create(), + ) + id = new_storage_metadata.id + await store_alias_mapping(alias, 'kvs', id, configuration) # If name is provided, get or create the storage by name. - if name and id is None: + elif name: id = ApifyKeyValueStoreMetadata.model_validate( await apify_kvss_client.get_or_create(name=name), ).id - apify_kvs_client = apify_client_async.key_value_store(key_value_store_id=id) - # If both id and name are None, try to get the default storage ID from environment variables. - # The default storage ID environment variable is set by the Apify platform. It also contains - # a new storage ID after Actor's reboot or migration. - if id is None and name is None: + # If none are provided, try to get the default storage ID from environment variables. + elif id is None: id = configuration.default_key_value_store_id - apify_kvs_client = apify_client_async.key_value_store(key_value_store_id=id) + + # Now create the client for the determined ID + apify_kvs_client = apify_client_async.key_value_store(key_value_store_id=id) # Fetch its metadata. metadata = await apify_kvs_client.get() @@ -142,7 +157,7 @@ async def open( # Verify that the storage exists by fetching its metadata again. metadata = await apify_kvs_client.get() if metadata is None: - raise ValueError(f'Opening key-value store with id={id} and name={name} failed.') + raise ValueError(f'Opening key-value store with id={id}, name={name}, and alias={alias} failed.') return cls( api_client=apify_kvs_client, diff --git a/src/apify/storage_clients/_apify/_request_queue_client.py b/src/apify/storage_clients/_apify/_request_queue_client.py index ec94f201..315de9d1 100644 --- a/src/apify/storage_clients/_apify/_request_queue_client.py +++ b/src/apify/storage_clients/_apify/_request_queue_client.py @@ -18,6 +18,7 @@ from crawlee.storage_clients.models import AddRequestsResponse, ProcessedRequest, RequestQueueMetadata from ._models import CachedRequest, ProlongRequestLockResponse, RequestQueueHead +from ._utils import resolve_alias_to_id, store_alias_mapping from apify import Request if TYPE_CHECKING: @@ -135,6 +136,7 @@ async def open( *, id: str | None, name: str | None, + alias: str | None, configuration: Configuration, ) -> ApifyRequestQueueClient: """Open an Apify request queue client. @@ -144,22 +146,27 @@ async def open( management structures. Args: - id: The ID of an existing request queue to open. If provided, the client will connect to this specific - storage. Cannot be used together with `name`. - name: The name of a request queue to get or create. If a storage with this name exists, it will be opened; - otherwise, a new one will be created. Cannot be used together with `id`. + id: The ID of the RQ to open. If provided, searches for existing RQ by ID. + Mutually exclusive with name and alias. + name: The name of the RQ to open (global scope, persists across runs). + Mutually exclusive with id and alias. + alias: The alias of the RQ to open (run scope, creates unnamed storage). 
+ Mutually exclusive with id and name. configuration: The configuration object containing API credentials and settings. Must include a valid `token` and `api_base_url`. May also contain a `default_request_queue_id` for fallback when neither - `id` nor `name` is provided. + `id`, `name`, nor `alias` is provided. Returns: An instance for the opened or created storage client. Raises: - ValueError: If the configuration is missing required fields (token, api_base_url), if both `id` and `name` - are provided, or if neither `id` nor `name` is provided and no default storage ID is available + ValueError: If the configuration is missing required fields (token, api_base_url), if more than one of + `id`, `name`, or `alias` is provided, or if none are provided and no default storage ID is available in the configuration. """ + if sum(1 for param in [id, name, alias] if param is not None) > 1: + raise ValueError('Only one of "id", "name", or "alias" can be specified, not multiple.') + token = configuration.token if not token: raise ValueError(f'Apify storage client requires a valid token in Configuration (token={token}).') @@ -185,25 +192,32 @@ async def open( ) apify_rqs_client = apify_client_async.request_queues() - match (id, name): - case (None, None): - # If both id and name are None, try to get the default storage ID from environment variables. - # The default storage ID environment variable is set by the Apify platform. It also contains - # a new storage ID after Actor's reboot or migration. - id = configuration.default_request_queue_id - case (None, name): - # If only name is provided, get or create the storage by name. - id = RequestQueueMetadata.model_validate( - await apify_rqs_client.get_or_create(name=name), - ).id - case (_, None): - # If only id is provided, use it. - pass - case (_, _): - # If both id and name are provided, raise an error. - raise ValueError('Only one of "id" or "name" can be specified, not both.') - if id is None: - raise RuntimeError('Unreachable code') + # Normalize 'default' alias to None + alias = None if alias == 'default' else alias + + # Handle alias resolution + if alias: + # Try to resolve alias to existing storage ID + resolved_id = await resolve_alias_to_id(alias, 'rq', configuration) + if resolved_id: + id = resolved_id + else: + # Create a new storage and store the alias mapping + new_storage_metadata = RequestQueueMetadata.model_validate( + await apify_rqs_client.get_or_create(), + ) + id = new_storage_metadata.id + await store_alias_mapping(alias, 'rq', id, configuration) + + # If name is provided, get or create the storage by name. + elif name: + id = RequestQueueMetadata.model_validate( + await apify_rqs_client.get_or_create(name=name), + ).id + + # If none are provided, try to get the default storage ID from environment variables. + elif id is None: + id = configuration.default_request_queue_id # Use suitable client_key to make `hadMultipleClients` response of Apify API useful. # It should persist across migrated or resurrected Actor runs on the Apify platform. @@ -227,7 +241,7 @@ async def open( # Verify that the storage exists by fetching its metadata again. 
metadata = await apify_rq_client.get() if metadata is None: - raise ValueError(f'Opening request queue with id={id} and name={name} failed.') + raise ValueError(f'Opening request queue with id={id}, name={name}, and alias={alias} failed.') metadata_model = RequestQueueMetadata.model_validate(metadata) diff --git a/src/apify/storage_clients/_apify/_storage_client.py b/src/apify/storage_clients/_apify/_storage_client.py index 689e2c77..dbd958f4 100644 --- a/src/apify/storage_clients/_apify/_storage_client.py +++ b/src/apify/storage_clients/_apify/_storage_client.py @@ -25,6 +25,7 @@ async def create_dataset_client( *, id: str | None = None, name: str | None = None, + alias: str | None = None, configuration: Configuration | None = None, ) -> ApifyDatasetClient: # Import here to avoid circular imports. @@ -32,7 +33,7 @@ async def create_dataset_client( configuration = configuration or ApifyConfiguration.get_global_configuration() if isinstance(configuration, ApifyConfiguration): - return await ApifyDatasetClient.open(id=id, name=name, configuration=configuration) + return await ApifyDatasetClient.open(id=id, name=name, alias=alias, configuration=configuration) raise TypeError( f'Expected "configuration" to be an instance of "apify.Configuration", ' @@ -45,6 +46,7 @@ async def create_kvs_client( *, id: str | None = None, name: str | None = None, + alias: str | None = None, configuration: Configuration | None = None, ) -> ApifyKeyValueStoreClient: # Import here to avoid circular imports. @@ -52,7 +54,7 @@ async def create_kvs_client( configuration = configuration or ApifyConfiguration.get_global_configuration() if isinstance(configuration, ApifyConfiguration): - return await ApifyKeyValueStoreClient.open(id=id, name=name, configuration=configuration) + return await ApifyKeyValueStoreClient.open(id=id, name=name, alias=alias, configuration=configuration) raise TypeError( f'Expected "configuration" to be an instance of "apify.Configuration", ' @@ -65,6 +67,7 @@ async def create_rq_client( *, id: str | None = None, name: str | None = None, + alias: str | None = None, configuration: Configuration | None = None, ) -> ApifyRequestQueueClient: # Import here to avoid circular imports. @@ -72,7 +75,7 @@ async def create_rq_client( configuration = configuration or ApifyConfiguration.get_global_configuration() if isinstance(configuration, ApifyConfiguration): - return await ApifyRequestQueueClient.open(id=id, name=name, configuration=configuration) + return await ApifyRequestQueueClient.open(id=id, name=name, alias=alias, configuration=configuration) raise TypeError( f'Expected "configuration" to be an instance of "apify.Configuration", ' diff --git a/src/apify/storage_clients/_apify/_utils.py b/src/apify/storage_clients/_apify/_utils.py new file mode 100644 index 00000000..25bd4816 --- /dev/null +++ b/src/apify/storage_clients/_apify/_utils.py @@ -0,0 +1,117 @@ +from __future__ import annotations + +from logging import getLogger +from typing import TYPE_CHECKING, Literal + +from apify_client import ApifyClientAsync + +if TYPE_CHECKING: + from apify_client.clients import KeyValueStoreClientAsync + + from apify import Configuration + +logger = getLogger(__name__) + +_ALIAS_MAPPING_KEY = '__STORAGE_ALIASES_MAPPING' + + +async def resolve_alias_to_id( + alias: str, + storage_type: Literal['dataset', 'kvs', 'rq'], + configuration: Configuration, +) -> str | None: + """Resolve a storage alias to its corresponding storage ID. + + Args: + alias: The alias to resolve. 
+ storage_type: Type of storage ('dataset', 'kvs', or 'rq'). + configuration: The configuration object containing API credentials. + + Returns: + The storage ID if found, None if the alias doesn't exist. + """ + default_kvs_client = await _get_default_kvs_client(configuration) + + # Create the dictionary key for this alias. + alias_key = f'alias-{storage_type}-{alias}' + + try: + record = await default_kvs_client.get_record(_ALIAS_MAPPING_KEY) + + # get_record can return {key: ..., value: ..., content_type: ...} + if isinstance(record, dict) and 'value' in record: + record = record['value'] + + # Extract the actual data from the KVS record + if isinstance(record, dict) and alias_key in record: + storage_id = record[alias_key] + return str(storage_id) + + except Exception as exc: + # If there's any error accessing the record, treat it as not found. + logger.warning(f'Error accessing alias mapping for {alias}: {exc}') + + return None + + +async def store_alias_mapping( + alias: str, + storage_type: Literal['dataset', 'kvs', 'rq'], + storage_id: str, + configuration: Configuration, +) -> None: + """Store a mapping from alias to storage ID in the default key-value store. + + Args: + alias: The alias to store. + storage_type: Type of storage ('dataset', 'kvs', or 'rq'). + storage_id: The storage ID to map the alias to. + configuration: The configuration object containing API credentials. + """ + default_kvs_client = await _get_default_kvs_client(configuration) + + # Create the dictionary key for this alias. + alias_key = f'alias-{storage_type}-{alias}' + + try: + record = await default_kvs_client.get_record(_ALIAS_MAPPING_KEY) + + # get_record can return {key: ..., value: ..., content_type: ...} + if isinstance(record, dict) and 'value' in record: + record = record['value'] + + # Update or create the record with the new alias mapping + if isinstance(record, dict): + record[alias_key] = storage_id + else: + record = {alias_key: storage_id} + + # Store the mapping back in the KVS.
+ await default_kvs_client.set_record(_ALIAS_MAPPING_KEY, record) + except Exception as exc: + logger.warning(f'Error accessing alias mapping for {alias}: {exc}') + + +async def _get_default_kvs_client(configuration: Configuration) -> KeyValueStoreClientAsync: + """Get a client for the default key-value store.""" + token = configuration.token + if not token: + raise ValueError(f'Apify storage client requires a valid token in Configuration (token={token}).') + + api_url = configuration.api_base_url + if not api_url: + raise ValueError(f'Apify storage client requires a valid API URL in Configuration (api_url={api_url}).') + + # Create Apify client with the provided token and API URL + apify_client_async = ApifyClientAsync( + token=token, + api_url=api_url, + max_retries=8, + min_delay_between_retries_millis=500, + timeout_secs=360, + ) + + # Get the default key-value store ID from configuration + default_kvs_id = configuration.default_key_value_store_id + + return apify_client_async.key_value_store(key_value_store_id=default_kvs_id) diff --git a/src/apify/storage_clients/_file_system/_storage_client.py b/src/apify/storage_clients/_file_system/_storage_client.py index 403943e3..fa3acb9c 100644 --- a/src/apify/storage_clients/_file_system/_storage_client.py +++ b/src/apify/storage_clients/_file_system/_storage_client.py @@ -27,9 +27,15 @@ async def create_kvs_client( *, id: str | None = None, name: str | None = None, + alias: str | None = None, configuration: Configuration | None = None, ) -> FileSystemKeyValueStoreClient: configuration = configuration or Configuration.get_global_configuration() - client = await ApifyFileSystemKeyValueStoreClient.open(id=id, name=name, configuration=configuration) + client = await ApifyFileSystemKeyValueStoreClient.open( + id=id, + name=name, + alias=alias, + configuration=configuration, + ) await self._purge_if_needed(client, configuration) return client diff --git a/tests/integration/actor_source_base/requirements.txt b/tests/integration/actor_source_base/requirements.txt index f7ff2350..9f6b32a2 100644 --- a/tests/integration/actor_source_base/requirements.txt +++ b/tests/integration/actor_source_base/requirements.txt @@ -1,4 +1,4 @@ # The test fixture will put the Apify SDK wheel path on the next line APIFY_SDK_WHEEL_PLACEHOLDER uvicorn[standard] -crawlee[parsel]==1.0.0rc1 +crawlee[parsel] == 0.6.13b37 diff --git a/tests/integration/test_actor_dataset.py b/tests/integration/test_actor_dataset.py index 1cce4fd9..c80bb342 100644 --- a/tests/integration/test_actor_dataset.py +++ b/tests/integration/test_actor_dataset.py @@ -145,3 +145,86 @@ async def test_force_cloud( assert dataset_items.items == [dataset_item] finally: await dataset_client.delete() + + +async def test_dataset_defaults( + make_actor: MakeActorFunction, + run_actor: RunActorFunction, +) -> None: + async def main() -> None: + from apify.storages import Dataset + + async with Actor: + was_rebooted = await Actor.get_value('was_rebooted', default_value=False) + + dataset_1 = await Actor.open_dataset() + dataset_2 = await Dataset.open() + + assert dataset_1 is dataset_2 + + if not was_rebooted: + await dataset_1.push_data({'from': 'dataset_1'}) + await dataset_2.push_data({'from': 'dataset_2'}) + await Actor.set_value('was_rebooted', value=True) + await Actor.reboot() + + dataset_11 = await Actor.open_dataset() + dataset_22 = await Dataset.open() + + assert dataset_11 is dataset_22 + assert dataset_1.id == dataset_11.id == dataset_2.id == dataset_22.id + + dataset_1_items = await 
dataset_11.list_items() + dataset_2_items = await dataset_22.list_items() + + assert dataset_1_items == [{'from': 'dataset_1'}, {'from': 'dataset_2'}] + assert dataset_2_items == [{'from': 'dataset_1'}, {'from': 'dataset_2'}] + + actor = await make_actor(label='dataset-defaults', main_func=main) + run_result = await run_actor(actor) + + assert run_result.status == 'SUCCEEDED' + + +async def test_dataset_aliases( + make_actor: MakeActorFunction, + run_actor: RunActorFunction, +) -> None: + async def main() -> None: + from apify.storages import Dataset + + async with Actor: + was_rebooted = await Actor.get_value('was_rebooted', default_value=False) + + dataset_1 = await Actor.open_dataset(alias='my-alias-dataset-1') + dataset_2 = await Dataset.open(alias='my-alias-dataset-2') + + assert dataset_1 is not dataset_2 + assert dataset_1.id != dataset_2.id + assert dataset_1.name is None + assert dataset_2.name is None + + if not was_rebooted: + await dataset_1.push_data({'from': 'dataset_1'}) + await dataset_2.push_data({'from': 'dataset_2'}) + await Actor.set_value('was_rebooted', value=True) + await Actor.reboot() + + dataset_11 = await Actor.open_dataset(alias='my-alias-dataset-1') + dataset_22 = await Dataset.open(alias='my-alias-dataset-2') + + assert dataset_1.id == dataset_11.id + assert dataset_11 is dataset_1 + + assert dataset_2.id == dataset_22.id + assert dataset_22 is dataset_2 + + dataset_1_items = await dataset_11.list_items() + dataset_2_items = await dataset_22.list_items() + assert dataset_1_items == [{'from': 'dataset_1'}] + assert dataset_2_items == [{'from': 'dataset_2'}] + + actor = await make_actor(label='dataset-aliases', main_func=main) + run_result = await run_actor(actor) + + assert run_result.status == 'SUCCEEDED' diff --git a/tests/integration/test_actor_key_value_store.py b/tests/integration/test_actor_key_value_store.py index 799cbea3..ede8f885 100644 --- a/tests/integration/test_actor_key_value_store.py +++ b/tests/integration/test_actor_key_value_store.py @@ -230,3 +230,86 @@ async def main() -> None: run_result = await run_actor(actor) assert run_result.status == 'SUCCEEDED' + + +async def test_kvs_defaults( + make_actor: MakeActorFunction, + run_actor: RunActorFunction, +) -> None: + async def main() -> None: + from apify.storages import KeyValueStore + + async with Actor: + was_rebooted = await Actor.get_value('was_rebooted', default_value=False) + + kvs_1 = await Actor.open_key_value_store() + kvs_2 = await KeyValueStore.open() + + assert kvs_1 is kvs_2 + + if not was_rebooted: + await kvs_1.set_value('key1', {'from': 'kvs_1'}) + await kvs_2.set_value('key2', {'from': 'kvs_2'}) + await Actor.set_value('was_rebooted', value=True) + await Actor.reboot() + + kvs_11 = await Actor.open_key_value_store() + kvs_22 = await KeyValueStore.open() + + assert kvs_11 is kvs_22 + assert kvs_1.id == kvs_11.id == kvs_2.id == kvs_22.id + + kvs_1_item = await kvs_11.get_value('key1') + kvs_2_item = await kvs_22.get_value('key2') + + assert kvs_1_item == {'from': 'kvs_1'} + assert kvs_2_item == {'from': 'kvs_2'} + + actor = await make_actor(label='kvs-defaults', main_func=main) + run_result = await run_actor(actor) + + assert run_result.status == 'SUCCEEDED' + + +async def test_kvs_aliases( + make_actor: MakeActorFunction, + run_actor: RunActorFunction, +) -> None: + async def main() -> None: + from apify.storages import KeyValueStore + + async with Actor: + was_rebooted = await Actor.get_value('was_rebooted', default_value=False) + + kvs_1 = await 
Actor.open_key_value_store(alias='my-alias-kvs-1') + kvs_2 = await KeyValueStore.open(alias='my-alias-kvs-2') + + assert kvs_1 is not kvs_2 + assert kvs_1.id != kvs_2.id + assert kvs_1.name is None + assert kvs_2.name is None + + if not was_rebooted: + await kvs_1.set_value('key1', {'from': 'kvs_1'}) + await kvs_2.set_value('key1', {'from': 'kvs_2'}) + await Actor.set_value('was_rebooted', value=True) + await Actor.reboot() + + kvs_11 = await Actor.open_key_value_store(alias='my-alias-kvs-1') + kvs_22 = await KeyValueStore.open(alias='my-alias-kvs-2') + + assert kvs_1.id == kvs_11.id + assert kvs_11 is kvs_1 + + assert kvs_2.id == kvs_22.id + assert kvs_22 is kvs_2 + + kvs_1_item = await kvs_11.get_value('key1') + kvs_2_item = await kvs_22.get_value('key1') + assert kvs_1_item == {'from': 'kvs_1'} + assert kvs_2_item == {'from': 'kvs_2'} + + actor = await make_actor(label='kvs-aliases', main_func=main) + run_result = await run_actor(actor) + + assert run_result.status == 'SUCCEEDED' diff --git a/tests/integration/test_actor_request_queue.py b/tests/integration/test_actor_request_queue.py index 80a8b628..9521234b 100644 --- a/tests/integration/test_actor_request_queue.py +++ b/tests/integration/test_actor_request_queue.py @@ -398,3 +398,91 @@ async def main() -> None: async with streamed_log: run_result = ActorRun.model_validate(await run_client.wait_for_finish(wait_secs=600)) assert run_result.status == 'SUCCEEDED' + + +async def test_rq_defaults( + make_actor: MakeActorFunction, + run_actor: RunActorFunction, +) -> None: + async def main() -> None: + from apify import Request + from apify.storages import RequestQueue + + async with Actor: + was_rebooted = await Actor.get_value('was_rebooted', default_value=False) + + rq_1 = await Actor.open_request_queue() + rq_2 = await RequestQueue.open() + + assert rq_1 is rq_2 + + if not was_rebooted: + await rq_1.add_request(Request(url='https://example.com/rq_1', unique_key='rq_1')) + await rq_2.add_request(Request(url='https://example.com/rq_2', unique_key='rq_2')) + await Actor.set_value('was_rebooted', value=True) + await Actor.reboot() + + rq_11 = await Actor.open_request_queue() + rq_22 = await RequestQueue.open() + + assert rq_11 is rq_22 + assert rq_1.id == rq_11.id == rq_2.id == rq_22.id + + request_1 = await rq_11.fetch_next_request() + request_2 = await rq_22.fetch_next_request() + + assert request_1 is not None + assert request_2 is not None + assert {request_1.url, request_2.url} == {'https://example.com/rq_1', 'https://example.com/rq_2'} + + actor = await make_actor(label='rq-defaults', main_func=main) + run_result = await run_actor(actor) + + assert run_result.status == 'SUCCEEDED' + + +async def test_rq_aliases( + make_actor: MakeActorFunction, + run_actor: RunActorFunction, +) -> None: + async def main() -> None: + from apify import Request + from apify.storages import RequestQueue + + async with Actor: + was_rebooted = await Actor.get_value('was_rebooted', default_value=False) + + rq_1 = await Actor.open_request_queue(alias='my-alias-rq-1') + rq_2 = await RequestQueue.open(alias='my-alias-rq-2') + + assert rq_1 is not rq_2 + assert rq_1.id != rq_2.id + assert rq_1.name is None + assert rq_2.name is None + + if not was_rebooted: + await rq_1.add_request(Request(url='https://example.com/rq_1', unique_key='rq_1')) + await rq_2.add_request(Request(url='https://example.com/rq_2', unique_key='rq_2')) + await Actor.set_value('was_rebooted', value=True) + await Actor.reboot() + + rq_11 = await Actor.open_request_queue(alias='my-alias-rq-1') 
+ rq_22 = await RequestQueue.open(alias='my-alias-rq-2') + + assert rq_1.id == rq_11.id + assert rq_11 is rq_1 + + assert rq_2.id == rq_22.id + assert rq_22 is rq_2 + + request_1 = await rq_11.fetch_next_request() + request_2 = await rq_22.fetch_next_request() + assert request_1 is not None + assert request_1.url == 'https://example.com/rq_1' + assert request_2 is not None + assert request_2.url == 'https://example.com/rq_2' + + actor = await make_actor(label='rq-aliases', main_func=main) + run_result = await run_actor(actor) + + assert run_result.status == 'SUCCEEDED' diff --git a/tests/unit/storage_clients/test_file_system.py b/tests/unit/storage_clients/test_file_system.py index ed7cf413..71bb8c53 100644 --- a/tests/unit/storage_clients/test_file_system.py +++ b/tests/unit/storage_clients/test_file_system.py @@ -22,6 +22,7 @@ async def test_purge_preserves_input_file_and_metadata() -> None: kvs_storage_client = await ApifyFileSystemKeyValueStoreClient.open( id=None, name='test-kvs', + alias=None, configuration=configuration, ) diff --git a/uv.lock b/uv.lock index f747f202..3dfd7474 100644 --- a/uv.lock +++ b/uv.lock @@ -76,7 +76,7 @@ requires-dist = [ { name = "apify-client", specifier = ">=2.0.0,<3.0.0" }, { name = "apify-shared", specifier = ">=2.0.0,<3.0.0" }, { name = "cachetools", specifier = ">=5.5.0" }, - { name = "crawlee", specifier = "==1.0.0rc1" }, + { name = "crawlee", specifier = "==0.6.13b37" }, { name = "cryptography", specifier = ">=42.0.0" }, { name = "impit", specifier = ">=0.5.3" }, { name = "lazy-object-proxy", specifier = ">=1.11.0" }, @@ -477,7 +477,7 @@ toml = [ [[package]] name = "crawlee" -version = "1.0.0rc1" +version = "0.6.13b37" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools" }, @@ -493,9 +493,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1e/1d/31d7710b54c78d12cdc359f8f30478714768cfdab669f4464a8632bb5db6/crawlee-1.0.0rc1.tar.gz", hash = "sha256:bf644826a030fb01c1c525d7da1a73f4ce3fb89671eca9544aa0fccc5e9eaaa6", size = 24822393, upload-time = "2025-08-22T06:46:29.831Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/64/13521e97bb0dcd606c1013c1f184a943e3a6b36e2e4ccf3adda9eb474efd/crawlee-0.6.13b37.tar.gz", hash = "sha256:77f8ca0e60689c19e41ec7d608ecc2fd65531eefe79ad98cf3cd77f3c6c1e412", size = 24839556, upload-time = "2025-09-12T16:04:27.011Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/68/fcb616bd86782c1445ad8b6d3b5ec40ce970adcda755deb5cc9347ba9fb0/crawlee-1.0.0rc1-py3-none-any.whl", hash = "sha256:748e54aea1884b2cc49e4cebbfb1842159dd2b93ae17284cd947fa8a066d137f", size = 274346, upload-time = "2025-08-22T06:46:27.035Z" }, + { url = "https://files.pythonhosted.org/packages/74/da/463751960f64e73b8388ef11b0b6f9fddc2776467440c9c841a295b5dc62/crawlee-0.6.13b37-py3-none-any.whl", hash = "sha256:ed10223e27b9c2791056110eca31f4c03b4ab4535c14307754fc7731bd59f70a", size = 278512, upload-time = "2025-09-12T16:04:23.576Z" }, ] [package.optional-dependencies] @@ -794,43 +794,50 @@ wheels = [ [[package]] name = "impit" -version = "0.5.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/53/c8/9d9f389521b254ac184410daa673c0b1272956acf499f85d6caa7e389363/impit-0.5.3.tar.gz", hash = "sha256:d1e60637620044f4cebe5c773890ad5825d6000b9741b880b05f1fa27d40bf48", size = 92865, upload-time = "2025-08-13T13:27:14.986Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ba/27/62060ce17059a8558d61ae320e56fd8a543bfe094789fc53e75b39db9977/impit-0.5.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c56b94e03248a23d9d7b32c257bec75f9faf475e17699eedce096b363c11b562", size = 3845861, upload-time = "2025-08-13T13:25:58.124Z" }, - { url = "https://files.pythonhosted.org/packages/df/3a/bb5d093d96f599e60f04ca701925ca1773e3b00673426775cf2203270096/impit-0.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df62f62190126fae89f9b53e697821df7cd6f3aa0cc0b96dd9d6c8a03abe18c2", size = 3672069, upload-time = "2025-08-13T13:26:00.535Z" }, - { url = "https://files.pythonhosted.org/packages/ce/c4/4c76d039a646a2e1c4efe55af0fab43ed516e1cbebb2e8e8bc8b049f53c6/impit-0.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b75399aa181da3e7d9400d5cd0716110cd4445977b7c3ff2839d57fa2f30dcdd", size = 6080651, upload-time = "2025-08-13T13:26:02.212Z" }, - { url = "https://files.pythonhosted.org/packages/04/c0/57a0dcda2e3d8c12c34a79553e0d69f00d58a352d25bd182cdd50770ae4f/impit-0.5.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0bd37f055ede7a7e5d552182658b820c0a307f4e0b1e2ddc508d77579b2624cb", size = 6380773, upload-time = "2025-08-13T13:26:03.895Z" }, - { url = "https://files.pythonhosted.org/packages/5a/1d/d80bdf2ef3258b2415fe418f74e5403514a2dacf064dd70af99594ec7d84/impit-0.5.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:600b11704bb8ff404f003a6275a2337750192f1a3deae4fa10aa2c961d7587bb", size = 6231628, upload-time = "2025-08-13T13:26:05.526Z" }, - { url = "https://files.pythonhosted.org/packages/54/53/0aa79afa0020ae20749050601629e2bf2f1ed9d68a28805e66f6d3bd3309/impit-0.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:aa67eb0071b482ec96bac6fac5cafe3bd86f6cefae36a66070342775907cc3ca", size = 3873476, upload-time = "2025-08-13T13:26:07.139Z" }, - { url = "https://files.pythonhosted.org/packages/8f/42/27efa4e8242b2b236d3f1273439259341f4ea3f61dc4b0bf2dd12cc3a425/impit-0.5.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c66ed9e5bbcd9ce2b0ff44bfa568d637bf3c2c61863c7495775a0e5464c8fffc", size = 3845871, upload-time = "2025-08-13T13:26:08.744Z" }, - { url = "https://files.pythonhosted.org/packages/83/8c/658cf77fec7abfb561818775bb0ab159ec52a749fc5dd9e3dbbd215c9cec/impit-0.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93d7f42d6d308553260f4ba69bb4725693191f332a219cb4f7d42f3f8cefbf8f", size = 3671992, upload-time = "2025-08-13T13:26:11.034Z" }, - { url = "https://files.pythonhosted.org/packages/f3/d8/dee3c3be6e1c2720b10cdbd285f2254a63d66509cf5591246d8de9b18bd8/impit-0.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51bee78ba62aa24dc58ca041206582ced8bcd45d3ec0878dc779d0286e01234f", size = 6080654, upload-time = "2025-08-13T13:26:12.641Z" }, - { url = "https://files.pythonhosted.org/packages/9b/3b/7e171f51b01f4ec3d00afff1e22a9c460295db4cf3d66f34467198cc1571/impit-0.5.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:408f161bc18b320bd6488a9192cc1608c21f175002a2539df944c5d9d83cc744", size = 6380952, upload-time = "2025-08-13T13:26:14.695Z" }, - { url = "https://files.pythonhosted.org/packages/99/3f/497592e3d6d31acfba7e32499e6ac895a33d68c033118bcbe84e19d9e333/impit-0.5.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d66848eb0cf15610334c366a74a30dbae38359a66040e97fdd3db55cb1c7c141", size = 6231660, upload-time = "2025-08-13T13:26:17.034Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/ce/4e37de0d402adec3c522dfe118e31d04ce64d6738c575d34cc6054ff9d6b/impit-0.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:af9cfe6d857639bae5aa291e01a98ad3153ea92d245815484ca4b073d24c5975", size = 3873307, upload-time = "2025-08-13T13:26:19.037Z" }, - { url = "https://files.pythonhosted.org/packages/38/11/0bd34da1a301c9deb1da95503aed960ba2b1c50a63b0ecb17a6f3a291d7d/impit-0.5.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d0e384950939b3c016df82db29d45ba273582fd04d31c9b26bbe0601a7cd4e31", size = 3845487, upload-time = "2025-08-13T13:26:20.683Z" }, - { url = "https://files.pythonhosted.org/packages/05/3b/ffe8aff8bda7df5cc301625db069ffb3b8447b9c01e88641ab5bd9ce295f/impit-0.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8b818347a22ddcb3e558e8c3adf5bce16a6956788c8f076a8bccd96b1d559440", size = 3671137, upload-time = "2025-08-13T13:26:23.521Z" }, - { url = "https://files.pythonhosted.org/packages/7a/a7/85dbdf70b823970d9a50bd227068a160387c092b2d19dc61c52869bf98dc/impit-0.5.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cafa9c81e0aa5f3f75b7791f7fbbe22862293e9fe457f1ebe0c2f6a62ab6124", size = 6079679, upload-time = "2025-08-13T13:26:25.508Z" }, - { url = "https://files.pythonhosted.org/packages/4c/22/dcd59bf7839dd14e1c3bc5156f6f838a36cdb2a421ab22e3b78d010216f8/impit-0.5.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e4563388cdf155f8ecddc514b0eee0fc3c5d4f3cb62680093c7b018b1e58c135", size = 6379652, upload-time = "2025-08-13T13:26:27.298Z" }, - { url = "https://files.pythonhosted.org/packages/cc/0f/9fa7a13a111c9e0d2e13571d31a8d4ce918718f58c09a0ac91d2eb37ad79/impit-0.5.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9881e3125dd371c37a3222707b6642e2b0c29c59ea9beb1750fb0f13f85e54f5", size = 6228239, upload-time = "2025-08-13T13:26:28.929Z" }, - { url = "https://files.pythonhosted.org/packages/b4/9b/c026d4ed7ff2524497ac3cd04d77d22be2cb38bd20abd79888a18c06ab52/impit-0.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:8e5aa8799f3df6f77af8bc3fcb2ba0972afabef0850e8f0ee29b6884dafa2fe0", size = 3872515, upload-time = "2025-08-13T13:26:30.484Z" }, - { url = "https://files.pythonhosted.org/packages/0c/3a/930fa974f7bd0feb90eeb3995444d9f5d0b83b327bf94f3522f818c90f01/impit-0.5.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f5813136ca3745d8be9c3f99951bdb14cbacbc67dda8f4dac4dc71a67cc25de1", size = 3845375, upload-time = "2025-08-13T13:26:32.003Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a8/fbe6149d51ce0977643da2d30f1ef683e3b5993b70c32f4f8304dbad1e96/impit-0.5.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8f6838b6570be457219868a9ffb446934e0e066c59b37a1ac5e4a12e52d37ee4", size = 3671230, upload-time = "2025-08-13T13:26:33.592Z" }, - { url = "https://files.pythonhosted.org/packages/be/4e/0b272492f35d22ceed98217e06381dab060f9420c91d2e0fdb7f3cf351ac/impit-0.5.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e102bf619dc1e6f11c353278de7b7d48d39d3f34a56619358f5660a63c4c13", size = 6080113, upload-time = "2025-08-13T13:26:35.573Z" }, - { url = "https://files.pythonhosted.org/packages/7a/12/83e36fed1e4ada02d2253fb9f984993ff1678c4e665d7b096c99b2bb2c3d/impit-0.5.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:02f2402bd9f6d1952e563bfc89c3055716c167b666a275d4a2e0d423dced77e8", size = 6379750, upload-time = "2025-08-13T13:26:37.526Z" }, - { url = 
"https://files.pythonhosted.org/packages/c3/69/d0eaf130e7dddca01e7a4ca33a9743fd4b72e20d7ebf79f445573664b5a4/impit-0.5.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:67a2d2ddeec1c255723beb6c2c1b03ece33aa1c1f69902fbd553aebc50930b84", size = 6228395, upload-time = "2025-08-13T13:26:39.753Z" }, - { url = "https://files.pythonhosted.org/packages/b8/6d/c59e76b0b32fc05bd56500db4bae164a92382c6c4ecb9c26bbee32edbd7d/impit-0.5.3-cp313-cp313-win_amd64.whl", hash = "sha256:6efab1a1e8cb890770716c4b3944e85387ef7ec85cd341e1bcca6821f88b4bd8", size = 3872809, upload-time = "2025-08-13T13:26:41.632Z" }, - { url = "https://files.pythonhosted.org/packages/dc/32/459f1619067d6dd4374b475fcb9a5288f6c0c009ce800553054792ede3b1/impit-0.5.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1904348747dd261cc44d9f2cba57994c3cd09c9c1a57022354483e132e3551a6", size = 6380287, upload-time = "2025-08-13T13:26:43.26Z" }, - { url = "https://files.pythonhosted.org/packages/84/0f/a6a07c677b6b8f590bdea4665808aa08016b9df4775373c37e8d39b0d79f/impit-0.5.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:316a405ef2837b82a94a99d260ea899a0899c97324e93fb2feafde520871044f", size = 6228540, upload-time = "2025-08-13T13:26:45.167Z" }, - { url = "https://files.pythonhosted.org/packages/26/9b/a77e3e9e72deb218b4b0f8833fa559cea14d9ce2ded979ad159d86092fd4/impit-0.5.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd04a339477148d98fa22688b018c83e801890c3d279dda218c4b3f8bfc17a79", size = 6079959, upload-time = "2025-08-13T13:26:47.129Z" }, - { url = "https://files.pythonhosted.org/packages/be/80/5cb18728a35dac8e9eaaf80faa73cafd420193de6c38cd2d3d1358c9508e/impit-0.5.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8557ec98cb0da3ce895ea4f4251b77b331b8d9aa294be67d38fa6619eb7fa0a1", size = 6080789, upload-time = "2025-08-13T13:27:00.317Z" }, - { url = "https://files.pythonhosted.org/packages/07/8e/58e31d45496aa2347ed3fe041b130dee76b9f07863f613af55d4e29cd5e1/impit-0.5.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:166d1854bb39c49a564ca79806ab2108db574b870e1dccf0fcf9cd9a2756b1a7", size = 6380588, upload-time = "2025-08-13T13:27:02.427Z" }, - { url = "https://files.pythonhosted.org/packages/bc/9b/a928218c729763628079cb635e91fc318488b4028d7f4f87d693562870fc/impit-0.5.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ebffdb5cadd5331e79b080b9c2562d7cc527aa1e68b6a2541aa4d1da994acc5e", size = 6231759, upload-time = "2025-08-13T13:27:04.147Z" }, - { url = "https://files.pythonhosted.org/packages/58/7b/43e8bfc3dfe8cf27bcc92ca31f1d179abec163e0d4520838a11ca225c202/impit-0.5.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6283f3e18d96e9dfd11ede7a62befe62e389a6ed5a0da7b7d97b80d91464bc77", size = 6081197, upload-time = "2025-08-13T13:27:05.769Z" }, - { url = "https://files.pythonhosted.org/packages/ec/56/e659b5161a2da471d58bba7897a1c6ef0f5faeb39aaa2836e0b9f81e1be1/impit-0.5.3-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:fcb69f075b0365ac04d17c1debbc9b3ee65b7eeac7fd6396a2c946ab8f3a8c0f", size = 6380852, upload-time = "2025-08-13T13:27:07.425Z" }, - { url = "https://files.pythonhosted.org/packages/24/b8/905597c2c53faeb8a08bf1c2c4663ba60afe73185008067bb652bae8f3f5/impit-0.5.3-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:8402894065839a56a02b517e8c9194ac948c40ff74faf8b8641c3208e8337fd8", size = 6232025, upload-time = "2025-08-13T13:27:09.161Z" }, +version = "0.7.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/eb/2ce605322052906eda03344c8318b213b9f804ad903f83d104d6214dae64/impit-0.7.1.tar.gz", hash = "sha256:c0aeded889d08b7defa4652acc0c65a02f6ba5d9544df6f30bb04589489fe6dd", size = 94829, upload-time = "2025-09-08T10:48:29.512Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/31/0cfcd425ffe44afd6d6a67f19a732fce7b83cf342a13a8cd8e3f7364731b/impit-0.7.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5a791baade21d4e6cd62409cf22f710e23e475c671871ea674b9d358b4ba9604", size = 3862290, upload-time = "2025-09-08T10:46:50.176Z" }, + { url = "https://files.pythonhosted.org/packages/d0/ef/6e7206e08cda472b8955b5fa6015dc7412fd901e24215510c87dcf6a34d4/impit-0.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:42c74c02fa03197e4b16414c4f25ad0934ab6db669bc30f5ce40cf3ff33272f5", size = 3691913, upload-time = "2025-09-08T10:46:53.641Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/d4b49c9f968041889b9eec1cd9bea7d031b91da31b2b2d1e7885edfc7bd5/impit-0.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a721ed6a2778aaf0b5a399a8dcb757aa68ef8d99e5d978621d26baf81b516fe", size = 6103945, upload-time = "2025-09-08T10:46:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/ae/1d/d5a8d454cfb330f99e17da2af8c68afa9ceaf0999ca61591663f34931644/impit-0.7.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:e691a3d17250bd2d0ac7430cae418b784c8e27d41dd788dc368e939987b135c8", size = 6051422, upload-time = "2025-09-08T10:46:57.419Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/e7c82cc949a9a23db5e1131f5901800327903829cb7985596e7e409910de/impit-0.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:37b3dbdbcea0e9f18da0a4062e03f056e5f77521b9c4a09d00bca6cce84a1ee1", size = 6401516, upload-time = "2025-09-08T10:46:59.455Z" }, + { url = "https://files.pythonhosted.org/packages/72/ad/ef5a0e91c32aa2c621f8789c57cb1daf69b1b0b1b15bbab2ec3bcfc5658b/impit-0.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c34c31d47fcb7427b6927911b908a72b04d3783a17b5969db6b232ccf655152", size = 6257993, upload-time = "2025-09-08T10:47:01.577Z" }, + { url = "https://files.pythonhosted.org/packages/4e/31/4bdb8e77b7c5b71b7f8e690674b7f50227453663e11bc1cf6448d0b1a421/impit-0.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:ec8f04bf7681a8a7cba84e8d3f4f800c01342c6b72b8660a858626dc373b52dd", size = 3872068, upload-time = "2025-09-08T10:47:03.374Z" }, + { url = "https://files.pythonhosted.org/packages/bd/11/878f37d5a5aad658ae9f9e7a8b9a6802285108bbe5ddd48c8975667e5fe5/impit-0.7.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7757f5bd4ee922d67d64cab438791fd88da2985282d71a7272afe185ae5d89f3", size = 3862299, upload-time = "2025-09-08T10:47:04.887Z" }, + { url = "https://files.pythonhosted.org/packages/df/c4/6f9f754680f0b6abcc4c87704a4d6d82c6c1e30fec6ecb620e4457cc6b0f/impit-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07ebb816aabbc121d1d7d135e47d7126e834fffa211da60589c7ed97f00aa2e2", size = 3688743, upload-time = "2025-09-08T10:47:06.845Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/fbbe2ca1a230096e8d455aa1483571c0277d552a854aa161db8e14f6aa50/impit-0.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88fc88ab3dea6931feeb64510c66aa9358fff22712b2540d3fa7f4cae522b419", size = 6103684, upload-time = "2025-09-08T10:47:08.709Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/e4/91a4ed986a942887e2d1a54f8da5c70ff60b6310cdd09df0b1a71cbf0320/impit-0.7.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0b39016a23e13469755f1d00383099d57b48bae1804e9b40c9d41afc2aa31764", size = 6051490, upload-time = "2025-09-08T10:47:10.776Z" }, + { url = "https://files.pythonhosted.org/packages/55/ba/eca41845dcadb9d201ad63ec50ad1727493391b3d92205437933c103d057/impit-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:78b515b9ab287af150966a6d42f860f4c23aab18e384c34e0f746fa6030a3863", size = 6401440, upload-time = "2025-09-08T10:47:12.801Z" }, + { url = "https://files.pythonhosted.org/packages/70/1a/f1d2e2244f15abc76f641a11feaae2ff4fa139f0cae4e765edd0266d02ea/impit-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7c8795b050f77296af5305a97ab8debc7e4d931753ab5926421c16e38972c298", size = 6257891, upload-time = "2025-09-08T10:47:14.939Z" }, + { url = "https://files.pythonhosted.org/packages/c1/94/3f0796d8af75ef3466810d93a817031607fd41b13444871ffe108caf7a8f/impit-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:dcd6636fe069323c5f5a7321d35d17d9b5331315c89b699b0f2863c6e846d54d", size = 3872067, upload-time = "2025-09-08T10:47:17.635Z" }, + { url = "https://files.pythonhosted.org/packages/22/a6/55f438a932f665a7d84a9599d274f84bf6a1a5106422d3266b7720159b39/impit-0.7.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7381dd958fb3243b287042e2ed2daeda08b1b341c90e6b14163a2ff691e4a8da", size = 3862388, upload-time = "2025-09-08T10:47:19.709Z" }, + { url = "https://files.pythonhosted.org/packages/73/4c/f630a44fe8480accacb593c4a3312b72960d84988df155e1c3a12b129657/impit-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86346180991afbeeb540514cc82f200b752cf9de662f8f11381da387c9474ac8", size = 3690890, upload-time = "2025-09-08T10:47:21.312Z" }, + { url = "https://files.pythonhosted.org/packages/8b/8d/b8cfc7ca71212620f70a5589d040b3b64367e6d7e086069141f730f1c3d4/impit-0.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c223e53c5e5eaf543bd307508c62b536a38c823e71552d652c5cc0196265ea4", size = 6102766, upload-time = "2025-09-08T10:47:23.076Z" }, + { url = "https://files.pythonhosted.org/packages/14/1d/86decd4f47dc3300d5cd84d61b4ef85f8bcce23d8bf1dccd1752fdbc8f60/impit-0.7.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:82e6c7398f13aad321ae54842f3f753fef9d44dc0ac602793fcf53c0c0e53a7f", size = 6046205, upload-time = "2025-09-08T10:47:25.499Z" }, + { url = "https://files.pythonhosted.org/packages/f0/61/20c252e7559e4eccc53d3f2db97435c21435621022bfb4f567f7327df5f9/impit-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:dc5c296c3d3ad4d5f0f62b102444d72acc748a9517dc8c0677de63eaa04dfec7", size = 6400586, upload-time = "2025-09-08T10:47:28.684Z" }, + { url = "https://files.pythonhosted.org/packages/15/1a/acf90ec454fa87b2a9817d3be92f1c4d7524e2b01e94f42584dfa4c8a314/impit-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:929ebdf3bd6d85632c8ac6dd6fa1a74cc160f1189d2b8b2136eece22307c2f99", size = 6257123, upload-time = "2025-09-08T10:47:30.861Z" }, + { url = "https://files.pythonhosted.org/packages/d0/67/ea0604fe65ef30d7869369c1febf9c424370133bb10a2ccdf8ec035a9fec/impit-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:049f96a81758b3eb1e0c5b045725339b4b6ddb77a9c0b080c82f69eb0de1453a", size = 3870677, upload-time = "2025-09-08T10:47:32.876Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/f8/94584445b21256705714f1cf809451c425750ead967b33b863a7ed6bc8b3/impit-0.7.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3508c326791f10a3709986e1b5f3e54dba336c3df60488295e371789322f5d65", size = 3862281, upload-time = "2025-09-08T10:47:34.638Z" }, + { url = "https://files.pythonhosted.org/packages/88/78/7e62084927ef0756a750a52de9c8dc05035bed5f8fd522740c4a771ffe75/impit-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0fcb7b9a7ef770a10e186b42cb6cf573ed8f4a1f30a537d0233226315bfafdc0", size = 3691035, upload-time = "2025-09-08T10:47:36.218Z" }, + { url = "https://files.pythonhosted.org/packages/bf/2d/ee62826e0edb0f66afca3c4348ea1591318e27848097d49d930765183807/impit-0.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f3813015b764fbb4d01e11a600376ad4bf6346ac32ce61fd9b60c0c1818929", size = 6102958, upload-time = "2025-09-08T10:47:38.319Z" }, + { url = "https://files.pythonhosted.org/packages/1d/48/3552142c70a41285cfae3e45401c0720edc047ed8ebf72412d2ccefe6035/impit-0.7.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ac088dd0f5eeff5584593e54eba4e0af307632947dcbe1e839433d6a100eec05", size = 6046205, upload-time = "2025-09-08T10:47:40.113Z" }, + { url = "https://files.pythonhosted.org/packages/ac/b9/9756e7146f9ff6f53e41c834b7a4f80d9cb3987c522f43755b40288e0d90/impit-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7aae188c904a27c9d587009be63604f691ea2b8eba627cc256508a0ae99a1deb", size = 6400556, upload-time = "2025-09-08T10:47:41.845Z" }, + { url = "https://files.pythonhosted.org/packages/61/bd/1c947842b9ab464c9a93ec4c28457535451377dd32e338b66e31c3924e24/impit-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:81723a2ea00d447703223577b3e1a4a5fafe4227a210b88e5e2cd1081ed9b3d1", size = 6256999, upload-time = "2025-09-08T10:47:44.49Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d2/6d112234e2860ffccdcfcc4c3a4bfe69bed621d2bfd0dd3c0e56118b94a2/impit-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:1e1c9024ccafd6a4e3b4cf390fc8aa039cd666fb2c85fd7ac8c20e0304a0d2dc", size = 3870607, upload-time = "2025-09-08T10:47:46.15Z" }, + { url = "https://files.pythonhosted.org/packages/ba/98/7fc60cda1862893790db81c634f563d43b9280a76ba5c9f9e983865b9fa9/impit-0.7.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:7e1aa90f7a944dc33031499d06d618ac20c69c7ba7e40bc1480b0cb203985b61", size = 6045764, upload-time = "2025-09-08T10:47:48.383Z" }, + { url = "https://files.pythonhosted.org/packages/55/3d/2fc7b098956dbe3e6d9e43680d33334db483844a651a89e9b2974166210b/impit-0.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6ac90a37f38b0a196a9b00713d4c28bca705fceae332ea5290a262b2435560ba", size = 6400974, upload-time = "2025-09-08T10:47:50.178Z" }, + { url = "https://files.pythonhosted.org/packages/2e/13/169357aa83925d213d26e548150e471142b565f6f58ee4e8afa3c31113d8/impit-0.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:23abfc85f3b98eeee6c3be5c5c08ec8b97d483dd6e08cdabc151b9210713ae2d", size = 6258112, upload-time = "2025-09-08T10:47:51.946Z" }, + { url = "https://files.pythonhosted.org/packages/ea/b4/343b5008887d9746c920743c9d8a40dd9c4f7c06a618cda0222931bf5980/impit-0.7.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:962022ae1bdafc1d8296746e1b68385b604e119cfc80c57ed1a2d7c512b251e6", size = 6103684, upload-time = "2025-09-08T10:47:53.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/3f/5c013d5dfe5bda1ce0b78244718b7507739dacae85b584bac1ae6d95b711/impit-0.7.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8327eff29263338533b60f4af4092b0483c099c5a569a5bc340bb92d0b29d460", size = 6104423, upload-time = "2025-09-08T10:48:08.437Z" }, + { url = "https://files.pythonhosted.org/packages/51/cb/3ec260ce1860b7ccffbe83d29fef685629cf3260ab7bd3923721a7ba621e/impit-0.7.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:aa0c64dae700a52ee7c694d43bd7d0f2fd3374b995451915e7106c48e14d9417", size = 6051790, upload-time = "2025-09-08T10:48:10.267Z" }, + { url = "https://files.pythonhosted.org/packages/46/b9/04c1f197bcadfeb534193d8d4670460e643fdbd9da83f223842e7af05420/impit-0.7.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:9af9bfad7c0b94025a4c9a9e81122e52e6b0f4f55c3593dc3dfb42738fb6547f", size = 6402278, upload-time = "2025-09-08T10:48:11.971Z" }, + { url = "https://files.pythonhosted.org/packages/59/e2/a5543dc981d97ac3a10112cf75d2d60fb22baf7870b5fce0b052866c91f3/impit-0.7.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9c120eb55dd98b6f0fae94d62c0e7d0db249f63ce4f49a5770f8d6aeb86c099d", size = 6258198, upload-time = "2025-09-08T10:48:13.64Z" }, + { url = "https://files.pythonhosted.org/packages/dc/3c/281c3f36b1d51d1addd638a553b72f922872108d1028cff4baf300218d9b/impit-0.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39ba8b54fa090e79751caacce0cd6cba69c484e7a7136a7e9b63fdbf1beff0bf", size = 6103562, upload-time = "2025-09-08T10:48:15.455Z" }, + { url = "https://files.pythonhosted.org/packages/76/c0/eec65578c879a5ccecc991685ac6726cdaa674a81a6370075e5f3c8aca64/impit-0.7.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:23fbab708afd4df8950bde9d0229138df928ccf8374c14ccb8a0805d1e56de32", size = 6046675, upload-time = "2025-09-08T10:48:17.18Z" }, + { url = "https://files.pythonhosted.org/packages/48/20/674eb41382e41665f7e12e0eb909b1cc4e33eaba80670d7bf9696d92891e/impit-0.7.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8bc839d78f214fc8de94d5af2642f8dab7bcca8428eee0c2a46403a537bdef15", size = 6401223, upload-time = "2025-09-08T10:48:19.317Z" }, + { url = "https://files.pythonhosted.org/packages/cf/1d/d2874c50ec29e66f2e22e569686d8cdcfa30ff1600265f64c33fdae8fc8a/impit-0.7.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3f6449a9f8d4c0eb547e6b821fb622f8e84dcb19264e0abf5a49d7e0c71e3792", size = 6257905, upload-time = "2025-09-08T10:48:21.752Z" }, ] [[package]]