diff --git a/docs/03_concepts/code/03_rq.py b/docs/03_concepts/code/03_rq.py
index fe1ea605..e9ad6a51 100644
--- a/docs/03_concepts/code/03_rq.py
+++ b/docs/03_concepts/code/03_rq.py
@@ -20,13 +20,10 @@ async def main() -> None:

         # If you try to add an existing request again, it will not do anything
         add_request_info = await queue.add_request(
-            Request.from_url('http://different-example.com/5')
+            Request.from_url('http://example.com/5')
         )
         Actor.log.info(f'Add request info: {add_request_info}')

-        processed_request = await queue.get_request(add_request_info.id)
-        Actor.log.info(f'Processed request: {processed_request}')
-
         # Finally, process the queue until all requests are handled
         while not await queue.is_finished():
             # Fetch the next unhandled request in the queue
diff --git a/docs/04_upgrading/upgrading_to_v3.md b/docs/04_upgrading/upgrading_to_v3.md
index eba1f2d4..d9f179e5 100644
--- a/docs/04_upgrading/upgrading_to_v3.md
+++ b/docs/04_upgrading/upgrading_to_v3.md
@@ -1,6 +1,6 @@
 ---
-id: upgrading-to-v2
-title: Upgrading to v2
+id: upgrading-to-v3
+title: Upgrading to v3
 ---

 This page summarizes the breaking changes between Apify Python SDK v2.x and v3.0.
diff --git a/src/apify/scrapy/requests.py b/src/apify/scrapy/requests.py
index 63bba3c7..27328c5c 100644
--- a/src/apify/scrapy/requests.py
+++ b/src/apify/scrapy/requests.py
@@ -122,7 +122,7 @@ def to_scrapy_request(apify_request: ApifyRequest, spider: Spider) -> ScrapyRequ

     # Update the meta field with the meta field from the apify_request
     meta = scrapy_request.meta or {}
-    meta.update({'apify_request_id': apify_request.id, 'apify_request_unique_key': apify_request.unique_key})
+    meta.update({'apify_request_unique_key': apify_request.unique_key})

     # scrapy_request.meta is a property, so we have to set it like this
     scrapy_request._meta = meta  # noqa: SLF001
@@ -134,7 +134,6 @@ def to_scrapy_request(apify_request: ApifyRequest, spider: Spider) -> ScrapyRequ
         url=apify_request.url,
         method=apify_request.method,
         meta={
-            'apify_request_id': apify_request.id,
             'apify_request_unique_key': apify_request.unique_key,
         },
     )
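Note: with 'apify_request_id' gone from the Scrapy request meta, only the unique key round-trips between Apify and Scrapy requests. A minimal sketch of what a spider callback can still rely on (the spider itself is illustrative, not part of this change):

    from scrapy import Spider
    from scrapy.http import Response

    class ExampleSpider(Spider):  # hypothetical spider, for illustration only
        name = 'example'

        def parse(self, response: Response) -> None:
            # 'apify_request_unique_key' is the only Apify-specific key set by to_scrapy_request now.
            unique_key = response.meta.get('apify_request_unique_key')
            self.logger.info(f'Handling request with unique key: {unique_key}')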
""" - id: str - """The ID of the request.""" + unique_key: str + """Unique key of the request.""" was_already_handled: bool """Whether the request was already handled.""" diff --git a/src/apify/storage_clients/_apify/_request_queue_client.py b/src/apify/storage_clients/_apify/_request_queue_client.py index c9e7031a..953a4d81 100644 --- a/src/apify/storage_clients/_apify/_request_queue_client.py +++ b/src/apify/storage_clients/_apify/_request_queue_client.py @@ -1,8 +1,11 @@ from __future__ import annotations import asyncio +import re +from base64 import b64encode from collections import deque from datetime import datetime, timedelta, timezone +from hashlib import sha256 from logging import getLogger from typing import TYPE_CHECKING, Final @@ -10,7 +13,6 @@ from typing_extensions import override from apify_client import ApifyClientAsync -from crawlee._utils.requests import unique_key_to_request_id from crawlee.storage_clients._base import RequestQueueClient from crawlee.storage_clients.models import AddRequestsResponse, ProcessedRequest, RequestQueueMetadata @@ -27,6 +29,29 @@ logger = getLogger(__name__) +def unique_key_to_request_id(unique_key: str, *, request_id_length: int = 15) -> str: + """Generate a deterministic request ID based on a unique key. + + Args: + unique_key: The unique key to convert into a request ID. + request_id_length: The length of the request ID. + + Returns: + A URL-safe, truncated request ID based on the unique key. + """ + # Encode the unique key and compute its SHA-256 hash + hashed_key = sha256(unique_key.encode('utf-8')).digest() + + # Encode the hash in base64 and decode it to get a string + base64_encoded = b64encode(hashed_key).decode('utf-8') + + # Remove characters that are not URL-safe ('+', '/', or '=') + url_safe_key = re.sub(r'(\+|\/|=)', '', base64_encoded) + + # Truncate the key to the desired length + return url_safe_key[:request_id_length] + + class ApifyRequestQueueClient(RequestQueueClient): """An Apify platform implementation of the request queue client.""" @@ -59,10 +84,10 @@ def __init__( """The name of the request queue.""" self._queue_head = deque[str]() - """A deque to store request IDs in the queue head.""" + """A deque to store request unique keys in the queue head.""" self._requests_cache: LRUCache[str, CachedRequest] = LRUCache(maxsize=self._MAX_CACHED_REQUESTS) - """A cache to store request objects. Request ID is used as the cache key.""" + """A cache to store request objects. Request unique key is used as the cache key.""" self._queue_has_locked_requests: bool | None = None """Whether the queue has requests locked by another client.""" @@ -248,14 +273,13 @@ async def add_batch_of_requests( already_present_requests: list[ProcessedRequest] = [] for request in requests: - if self._requests_cache.get(request.id): + if self._requests_cache.get(request.unique_key): # We are not sure if it was already handled at this point, and it is not worth calling API for it. # It could have been handled by another client in the meantime, so cached information about # `request.was_already_handled` is not reliable. already_present_requests.append( ProcessedRequest.model_validate( { - 'id': request.id, 'uniqueKey': request.unique_key, 'wasAlreadyPresent': True, 'wasAlreadyHandled': request.was_already_handled, @@ -267,14 +291,13 @@ async def add_batch_of_requests( # Add new request to the cache. 
@@ -59,10 +84,10 @@ def __init__(
         """The name of the request queue."""

         self._queue_head = deque[str]()
-        """A deque to store request IDs in the queue head."""
+        """A deque to store request unique keys in the queue head."""

         self._requests_cache: LRUCache[str, CachedRequest] = LRUCache(maxsize=self._MAX_CACHED_REQUESTS)
-        """A cache to store request objects. Request ID is used as the cache key."""
+        """A cache to store request objects. Request unique key is used as the cache key."""

         self._queue_has_locked_requests: bool | None = None
         """Whether the queue has requests locked by another client."""
@@ -248,14 +273,13 @@ async def add_batch_of_requests(
         already_present_requests: list[ProcessedRequest] = []

         for request in requests:
-            if self._requests_cache.get(request.id):
+            if self._requests_cache.get(request.unique_key):
                 # We are not sure if it was already handled at this point, and it is not worth calling API for it.
                 # It could have been handled by another client in the meantime, so cached information about
                 # `request.was_already_handled` is not reliable.
                 already_present_requests.append(
                     ProcessedRequest.model_validate(
                         {
-                            'id': request.id,
                             'uniqueKey': request.unique_key,
                             'wasAlreadyPresent': True,
                             'wasAlreadyHandled': request.was_already_handled,
                         }
                     )
                 )

             else:
                 # Add new request to the cache.
                 processed_request = ProcessedRequest.model_validate(
                     {
-                        'id': request.id,
                         'uniqueKey': request.unique_key,
                         'wasAlreadyPresent': True,
                         'wasAlreadyHandled': request.was_already_handled,
                     }
                 )
                 self._cache_request(
-                    unique_key_to_request_id(request.unique_key),
+                    request.unique_key,
                     processed_request,
                 )
                 new_requests.append(request)
@@ -299,7 +322,7 @@

             # Remove unprocessed requests from the cache
             for unprocessed_request in api_response.unprocessed_requests:
-                self._requests_cache.pop(unique_key_to_request_id(unprocessed_request.unique_key), None)
+                self._requests_cache.pop(unprocessed_request.unique_key, None)

         else:
             api_response = AddRequestsResponse.model_validate(
@@ -323,16 +346,16 @@
         return api_response

     @override
-    async def get_request(self, request_id: str) -> Request | None:
-        """Get a request by ID.
+    async def get_request(self, unique_key: str) -> Request | None:
+        """Get a request by unique key.

         Args:
-            request_id: The ID of the request to get.
+            unique_key: Unique key of the request to get.

         Returns:
             The request or None if not found.
         """
-        response = await self._api_client.get_request(request_id)
+        response = await self._api_client.get_request(unique_key_to_request_id(unique_key))

         if response is None:
             return None
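Since get_request is now keyed by unique_key (the client derives the platform request ID internally via unique_key_to_request_id), callers look a request up with the same value they used to enqueue it. A minimal sketch of the new call pattern, mirroring the integration test further below; the URL is illustrative:

    from apify import Actor

    async def main() -> None:
        async with Actor:
            rq = await Actor.open_request_queue()
            add_result = await rq.add_request('https://example.com/test')
            # Look the request up by its unique key instead of a platform-generated ID.
            retrieved = await rq.get_request(add_result.unique_key)
            Actor.log.info(f'Retrieved URL: {retrieved.url if retrieved else None}')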
@@ -360,15 +383,15 @@ async def fetch_next_request(self) -> Request | None:
             return None

         # Get the next request ID from the queue head
-        next_request_id = self._queue_head.popleft()
+        next_unique_key = self._queue_head.popleft()

-        request = await self._get_or_hydrate_request(next_request_id)
+        request = await self._get_or_hydrate_request(next_unique_key)

         # Handle potential inconsistency where request might not be in the main table yet
         if request is None:
             logger.debug(
                 'Cannot find a request from the beginning of queue, will be retried later',
-                extra={'nextRequestId': next_request_id},
+                extra={'nextRequestUniqueKey': next_unique_key},
             )
             return None

@@ -376,16 +399,16 @@
         if request.handled_at is not None:
             logger.debug(
                 'Request fetched from the beginning of queue was already handled',
-                extra={'nextRequestId': next_request_id},
+                extra={'nextRequestUniqueKey': next_unique_key},
             )
             return None

         # Use get request to ensure we have the full request object.
-        request = await self.get_request(request.id)
+        request = await self.get_request(request.unique_key)
         if request is None:
             logger.debug(
                 'Request fetched from the beginning of queue was not found in the RQ',
-                extra={'nextRequestId': next_request_id},
+                extra={'nextRequestUniqueKey': next_unique_key},
             )
             return None

@@ -407,7 +430,7 @@ async def mark_request_as_handled(self, request: Request) -> ProcessedRequest |
         if request.handled_at is None:
             request.handled_at = datetime.now(tz=timezone.utc)

-        if cached_request := self._requests_cache[request.id]:
+        if cached_request := self._requests_cache[request.unique_key]:
             cached_request.was_already_handled = request.was_already_handled
         try:
             # Update the request in the API
@@ -419,14 +442,14 @@ async def mark_request_as_handled(self, request: Request) -> ProcessedRequest |
             self._assumed_handled_count += 1

             # Update the cache with the handled request
-            cache_key = unique_key_to_request_id(request.unique_key)
+            cache_key = request.unique_key
             self._cache_request(
                 cache_key,
                 processed_request,
                 hydrated_request=request,
             )
         except Exception as exc:
-            logger.debug(f'Error marking request {request.id} as handled: {exc!s}')
+            logger.debug(f'Error marking request {request.unique_key} as handled: {exc!s}')
             return None
         else:
             return processed_request
@@ -467,7 +490,7 @@ async def reclaim_request(
             self._assumed_handled_count -= 1

             # Update the cache
-            cache_key = unique_key_to_request_id(request.unique_key)
+            cache_key = request.unique_key
             self._cache_request(
                 cache_key,
                 processed_request,
@@ -481,11 +504,11 @@

             # Try to release the lock on the request
             try:
-                await self._delete_request_lock(request.id, forefront=forefront)
+                await self._delete_request_lock(request.unique_key, forefront=forefront)
             except Exception as err:
-                logger.debug(f'Failed to delete request lock for request {request.id}', exc_info=err)
+                logger.debug(f'Failed to delete request lock for request {request.unique_key}', exc_info=err)
         except Exception as exc:
-            logger.debug(f'Error reclaiming request {request.id}: {exc!s}')
+            logger.debug(f'Error reclaiming request {request.unique_key}: {exc!s}')
             return None
         else:
             return processed_request
@@ -512,17 +535,17 @@ async def _ensure_head_is_non_empty(self) -> None:
         # Fetch requests from the API and populate the queue head
         await self._list_head(lock_time=self._DEFAULT_LOCK_TIME)

-    async def _get_or_hydrate_request(self, request_id: str) -> Request | None:
-        """Get a request by ID, either from cache or by fetching from API.
+    async def _get_or_hydrate_request(self, unique_key: str) -> Request | None:
+        """Get a request by unique key, either from cache or by fetching from API.

         Args:
-            request_id: The ID of the request to get.
+            unique_key: Unique key of the request to get.

         Returns:
             The request if found and valid, otherwise None.
""" # First check if the request is in our cache - cached_entry = self._requests_cache.get(request_id) + cached_entry = self._requests_cache.get(unique_key) if cached_entry and cached_entry.hydrated: # If we have the request hydrated in cache, check if lock is expired @@ -530,11 +553,11 @@ async def _get_or_hydrate_request(self, request_id: str) -> Request | None: # Try to prolong the lock if it's expired try: lock_secs = int(self._DEFAULT_LOCK_TIME.total_seconds()) - response = await self._prolong_request_lock(request_id, lock_secs=lock_secs) + response = await self._prolong_request_lock(unique_key, lock_secs=lock_secs) cached_entry.lock_expires_at = response.lock_expires_at except Exception: # If prolonging the lock fails, we lost the request - logger.debug(f'Failed to prolong lock for request {request_id}, returning None') + logger.debug(f'Failed to prolong lock for request {unique_key}, returning None') return None return cached_entry.hydrated @@ -543,22 +566,21 @@ async def _get_or_hydrate_request(self, request_id: str) -> Request | None: try: # Try to acquire or prolong the lock lock_secs = int(self._DEFAULT_LOCK_TIME.total_seconds()) - await self._prolong_request_lock(request_id, lock_secs=lock_secs) + await self._prolong_request_lock(unique_key, lock_secs=lock_secs) # Fetch the request data - request = await self.get_request(request_id) + request = await self.get_request(unique_key) # If request is not found, release lock and return None if not request: - await self._delete_request_lock(request_id) + await self._delete_request_lock(unique_key) return None # Update cache with hydrated request - cache_key = unique_key_to_request_id(request.unique_key) + cache_key = request.unique_key self._cache_request( cache_key, ProcessedRequest( - id=request_id, unique_key=request.unique_key, was_already_present=True, was_already_handled=request.handled_at is not None, @@ -566,7 +588,7 @@ async def _get_or_hydrate_request(self, request_id: str) -> Request | None: hydrated_request=request, ) except Exception as exc: - logger.debug(f'Error fetching or locking request {request_id}: {exc!s}') + logger.debug(f'Error fetching or locking request {unique_key}: {exc!s}') return None else: return request @@ -586,13 +608,15 @@ async def _update_request( Returns: The updated request """ + request_dict = request.model_dump(by_alias=True) + request_dict['id'] = unique_key_to_request_id(request.unique_key) response = await self._api_client.update_request( - request=request.model_dump(by_alias=True), + request=request_dict, forefront=forefront, ) return ProcessedRequest.model_validate( - {'id': request.id, 'uniqueKey': request.unique_key} | response, + {'uniqueKey': request.unique_key} | response, ) async def _list_head( @@ -616,8 +640,8 @@ async def _list_head( logger.debug(f'Using cached queue head with {len(self._queue_head)} requests') # Create a list of requests from the cached queue head items = [] - for request_id in list(self._queue_head)[:limit]: - cached_request = self._requests_cache.get(request_id) + for unique_key in list(self._queue_head)[:limit]: + cached_request = self._requests_cache.get(unique_key) if cached_request and cached_request.hydrated: items.append(cached_request.hydrated) @@ -653,11 +677,10 @@ async def _list_head( request = Request.model_validate(request_data) # Skip requests without ID or unique key - if not request.id or not request.unique_key: + if not request.unique_key: logger.debug( 'Skipping request from queue head, missing ID or unique key', extra={ - 'id': request.id, 
                         'unique_key': request.unique_key,
                     },
                 )
                 continue

             # Cache the request
             self._cache_request(
-                unique_key_to_request_id(request.unique_key),
+                request.unique_key,
                 ProcessedRequest(
-                    id=request.id,
                     unique_key=request.unique_key,
                     was_already_present=True,
                     was_already_handled=False,
                 ),
                 hydrated_request=request,
             )
-            self._queue_head.append(request.id)
+            self._queue_head.append(request.unique_key)

-        for leftover_request_id in leftover_buffer:
+        for leftover_unique_key in leftover_buffer:
             # After adding new requests to the forefront, any existing leftover locked request is kept in the end.
-            self._queue_head.append(leftover_request_id)
+            self._queue_head.append(leftover_unique_key)

         return RequestQueueHead.model_validate(response)

     async def _prolong_request_lock(
         self,
-        request_id: str,
+        unique_key: str,
         *,
         lock_secs: int,
     ) -> ProlongRequestLockResponse:
         """Prolong the lock on a specific request in the queue.

         Args:
-            request_id: The identifier of the request whose lock is to be prolonged.
+            unique_key: Unique key of the request whose lock is to be prolonged.
             lock_secs: The additional amount of time, in seconds, that the request will remain locked.

         Returns:
             A response containing the time at which the lock will expire.
         """
         response = await self._api_client.prolong_request_lock(
-            request_id=request_id,
+            request_id=unique_key_to_request_id(unique_key),
             # All requests reaching this code were the tip of the queue at the moment when they were fetched,
             # so if their lock expires, they should be put back to the forefront as their handling is long overdue.
             forefront=True,
@@ -710,7 +732,7 @@

         # Update the cache with the new lock expiration
         for cached_request in self._requests_cache.values():
-            if cached_request.id == request_id:
+            if cached_request.unique_key == unique_key:
                 cached_request.lock_expires_at = result.lock_expires_at
                 break

@@ -718,29 +740,29 @@
     async def _delete_request_lock(
         self,
-        request_id: str,
+        unique_key: str,
         *,
         forefront: bool = False,
     ) -> None:
         """Delete the lock on a specific request in the queue.

         Args:
-            request_id: ID of the request to delete the lock.
+            unique_key: Unique key of the request to delete the lock.
             forefront: Whether to put the request in the beginning or the end of the queue after the lock is deleted.
         """
         try:
             await self._api_client.delete_request_lock(
-                request_id=request_id,
+                request_id=unique_key_to_request_id(unique_key),
                 forefront=forefront,
             )

             # Update the cache to remove the lock
             for cached_request in self._requests_cache.values():
-                if cached_request.id == request_id:
+                if cached_request.unique_key == unique_key:
                     cached_request.lock_expires_at = None
                     break
         except Exception as err:
-            logger.debug(f'Failed to delete request lock for request {request_id}', exc_info=err)
+            logger.debug(f'Failed to delete request lock for request {unique_key}', exc_info=err)

     def _cache_request(
         self,
@@ -758,7 +780,7 @@
             hydrated_request: The hydrated request object, if available.
""" self._requests_cache[cache_key] = CachedRequest( - id=processed_request.id, + unique_key=processed_request.unique_key, was_already_handled=processed_request.was_already_handled, hydrated=hydrated_request, lock_expires_at=None, diff --git a/tests/integration/test_actor_request_queue.py b/tests/integration/test_actor_request_queue.py index a785f1ad..b9abc2a1 100644 --- a/tests/integration/test_actor_request_queue.py +++ b/tests/integration/test_actor_request_queue.py @@ -85,6 +85,7 @@ async def test_force_cloud( ) -> None: request_queue_id = (await apify_named_rq.get_metadata()).id request_info = await apify_named_rq.add_request(Request.from_url('http://example.com')) + assert request_info.id is not None request_queue_client = apify_client_async.request_queue(request_queue_id) request_queue_details = await request_queue_client.get() diff --git a/tests/integration/test_request_queue.py b/tests/integration/test_request_queue.py index fe9c50e5..1db730a7 100644 --- a/tests/integration/test_request_queue.py +++ b/tests/integration/test_request_queue.py @@ -399,38 +399,35 @@ async def main() -> None: assert run_result.status == 'SUCCEEDED' -async def test_get_request_by_id( +async def test_get_request_by_unique_key( make_actor: MakeActorFunction, run_actor: RunActorFunction, ) -> None: - """Test retrieving specific requests by their ID.""" + """Test retrieving specific requests by their unique_key.""" async def main() -> None: async with Actor: rq = await Actor.open_request_queue() Actor.log.info('Request queue opened') - # Add a request and get its ID + # Add a request and get its unique_key add_result = await rq.add_request('https://example.com/test') - request_id = add_result.id - Actor.log.info(f'Request added with ID: {request_id}') + request_unique_key = add_result.unique_key + Actor.log.info(f'Request added with unique_key: {request_unique_key}') - # Retrieve the request by ID - retrieved_request = await rq.get_request(request_id) + # Retrieve the request by unique_key + retrieved_request = await rq.get_request(request_unique_key) assert retrieved_request is not None, f'retrieved_request={retrieved_request}' assert retrieved_request.url == 'https://example.com/test', f'retrieved_request.url={retrieved_request.url}' - assert retrieved_request.id == request_id, ( - f'retrieved_request.id={retrieved_request.id}', - f'request_id={request_id}', - ) - Actor.log.info('Request retrieved successfully by ID') + assert retrieved_request.unique_key == request_unique_key, (f'{request_unique_key=}',) + Actor.log.info('Request retrieved successfully by unique_key') - # Test with non-existent ID - non_existent_request = await rq.get_request('non-existent-id') + # Test with non-existent unique_key + non_existent_request = await rq.get_request('non-existent-unique_key') assert non_existent_request is None, f'non_existent_request={non_existent_request}' - Actor.log.info('Non-existent ID correctly returned None') + Actor.log.info('Non-existent unique_key correctly returned None') - actor = await make_actor(label='rq-get-by-id-test', main_func=main) + actor = await make_actor(label='rq-get-by-unique-key-test', main_func=main) run_result = await run_actor(actor) assert run_result.status == 'SUCCEEDED' diff --git a/tests/unit/scrapy/requests/test_to_apify_request.py b/tests/unit/scrapy/requests/test_to_apify_request.py index 3c79fe1b..e69a7916 100644 --- a/tests/unit/scrapy/requests/test_to_apify_request.py +++ b/tests/unit/scrapy/requests/test_to_apify_request.py @@ -66,7 +66,6 @@ def 
         url='https://example.com',
         method='GET',
         meta={
-            'apify_request_id': 'abc123',
             'apify_request_unique_key': 'https://example.com',
             'userData': {'some_user_data': 'hello'},
         },
@@ -77,7 +76,6 @@ def test_with_id_and_unique_key(spider: Spider) -> None:

     assert apify_request.url == 'https://example.com'
     assert apify_request.method == 'GET'
-    assert apify_request.id == 'abc123'
     assert apify_request.unique_key == 'https://example.com'

     user_data = apify_request.user_data
diff --git a/tests/unit/scrapy/requests/test_to_scrapy_request.py b/tests/unit/scrapy/requests/test_to_scrapy_request.py
index 2b8f0ab7..13659527 100644
--- a/tests/unit/scrapy/requests/test_to_scrapy_request.py
+++ b/tests/unit/scrapy/requests/test_to_scrapy_request.py
@@ -36,7 +36,6 @@ def test_without_reconstruction(spider: Spider) -> None:
     assert isinstance(scrapy_request, Request)
     assert apify_request.url == scrapy_request.url
     assert apify_request.method == scrapy_request.method
-    assert apify_request.id == scrapy_request.meta.get('apify_request_id')
     assert apify_request.unique_key == scrapy_request.meta.get('apify_request_unique_key')


@@ -56,7 +55,6 @@ def test_without_reconstruction_with_optional_fields(spider: Spider) -> None:
     assert isinstance(scrapy_request, Request)
     assert apify_request.url == scrapy_request.url
     assert apify_request.method == scrapy_request.method
-    assert apify_request.id == scrapy_request.meta.get('apify_request_id')
     assert apify_request.unique_key == scrapy_request.meta.get('apify_request_unique_key')

     scrapy_request_headers = scrapy_request.headers.get('authorization')
@@ -82,7 +80,6 @@ def test_with_reconstruction(spider: Spider) -> None:
     assert isinstance(scrapy_request, Request)
     assert apify_request.url == scrapy_request.url
     assert apify_request.method == scrapy_request.method
-    assert apify_request.id == scrapy_request.meta.get('apify_request_id')
     assert apify_request.unique_key == scrapy_request.meta.get('apify_request_unique_key')
     assert apify_request.user_data == scrapy_request.meta.get('userData')
@@ -106,7 +103,6 @@ def test_with_reconstruction_with_optional_fields(spider: Spider) -> None:
     assert isinstance(scrapy_request, Request)
     assert apify_request.url == scrapy_request.url
     assert apify_request.method == scrapy_request.method
-    assert apify_request.id == scrapy_request.meta.get('apify_request_id')
     assert apify_request.unique_key == scrapy_request.meta.get('apify_request_unique_key')

     scrapy_request_headers = scrapy_request.headers.get('authorization')
diff --git a/tests/unit/storage_clients/test_apify_request_queue_client.py b/tests/unit/storage_clients/test_apify_request_queue_client.py
new file mode 100644
index 00000000..019b2e0b
--- /dev/null
+++ b/tests/unit/storage_clients/test_apify_request_queue_client.py
@@ -0,0 +1,38 @@
+import pytest
+
+from apify.storage_clients._apify._request_queue_client import unique_key_to_request_id
+
+
+def test_unique_key_to_request_id_length() -> None:
+    unique_key = 'exampleKey123'
+    request_id = unique_key_to_request_id(unique_key, request_id_length=15)
+    assert len(request_id) == 15, 'Request ID should have the correct length.'
+
+
+def test_unique_key_to_request_id_consistency() -> None:
+    unique_key = 'consistentKey'
+    request_id_1 = unique_key_to_request_id(unique_key)
+    request_id_2 = unique_key_to_request_id(unique_key)
+    assert request_id_1 == request_id_2, 'The same unique key should generate consistent request IDs.'
+
+
+@pytest.mark.parametrize(
+    ('unique_key', 'expected_request_id'),
+    [
+        ('abc', 'ungWv48BzpBQUDe'),
+        ('uniqueKey', 'xiWPs083cree7mH'),
+        ('', '47DEQpj8HBSaTIm'),
+        ('测试中文', 'lKPdJkdvw8MXEUp'),
+        ('test+/=', 'XZRQjhoG0yjfnYD'),
+    ],
+    ids=[
+        'basic_abc',
+        'keyword_uniqueKey',
+        'empty_string',
+        'non_ascii_characters',
+        'url_unsafe_characters',
+    ],
+)
+def test_unique_key_to_request_id_matches_known_values(unique_key: str, expected_request_id: str) -> None:
+    request_id = unique_key_to_request_id(unique_key)
+    assert request_id == expected_request_id, f'Unique key "{unique_key}" should produce the expected request ID.'
diff --git a/uv.lock b/uv.lock
index 5a238629..d868a680 100644
--- a/uv.lock
+++ b/uv.lock
@@ -478,7 +478,7 @@ toml = [
 [[package]]
 name = "crawlee"
 version = "0.6.13"
-source = { git = "https://github.com/apify/crawlee-python.git?rev=master#454de75b1516bed68dab69a3663e563704d55ce2" }
+source = { git = "https://github.com/apify/crawlee-python.git?rev=master#0650b7e097751b0cf6b190ef4c25b05e44169389" }
 dependencies = [
     { name = "cachetools" },
     { name = "colorama" },
@@ -489,8 +489,6 @@
     { name = "pydantic" },
     { name = "pydantic-settings" },
     { name = "pyee" },
-    { name = "sortedcollections" },
-    { name = "sortedcontainers" },
     { name = "tldextract" },
     { name = "typing-extensions" },
     { name = "yarl" },
@@ -1860,7 +1858,7 @@
 [[package]]
 name = "requests"
-version = "2.32.4"
+version = "2.32.5"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "certifi" },
@@ -1868,9 +1866,9 @@
     { name = "idna" },
     { name = "urllib3" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" },
+    { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
 ]
@@ -1973,27 +1971,6 @@
     { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
 ]

-[[package]]
-name = "sortedcollections"
-version = "2.1.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "sortedcontainers" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/01/00/6d749cc1f88e7f95f5442a8abb195fa607094deba9e0475affbfb7fa8c04/sortedcollections-2.1.0.tar.gz", hash = "sha256:d8e9609d6c580a16a1224a3dc8965789e03ebc4c3e5ffd05ada54a2fed5dcacd", size = 9287, upload-time = "2021-01-18T22:15:16.623Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/6e/39/c993a7d0c9dbf3aeca5008bdd00e4436ad9b7170527cef0a14634b47001f/sortedcollections-2.1.0-py3-none-any.whl", hash = "sha256:b07abbc73472cc459da9dd6e2607d73d1f3b9309a32dd9a57fa2c6fa882f4c6c", size = 9531, upload-time = "2021-01-18T22:15:15.36Z" },
-]
-
-[[package]]
-name = "sortedcontainers"
-version = "2.4.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" },
-]
-
 [[package]]
 name = "tldextract"
 version = "5.3.0"