 from __future__ import annotations

-import asyncio
 from collections import deque
 from datetime import datetime, timedelta, timezone
 from logging import getLogger
@@ -40,7 +39,6 @@ def __init__(
         self,
         *,
         api_client: RequestQueueClientAsync,
-        lock: asyncio.Lock,
     ) -> None:
         """Initialize a new instance.

@@ -49,14 +47,11 @@ def __init__(
         self._api_client = api_client
         """The Apify request queue client for API operations."""

-        self._lock = lock
-        """A lock to ensure that only one operation is performed at a time."""
-
         self._queue_head = deque[str]()
         """A deque to store request IDs in the queue head."""

         self._requests_cache: LRUCache[str, CachedRequest] = LRUCache(maxsize=self._MAX_CACHED_REQUESTS)
-        """A cache to store request objects."""
+        """A cache to store request objects. Request ID is used as the cache key."""

         self._queue_has_locked_requests: bool | None = None
         """Whether the queue has requests locked by another client."""
@@ -162,7 +157,6 @@ async def open(

         return cls(
             api_client=apify_rq_client,
-            lock=asyncio.Lock(),
         )

     @override
@@ -174,8 +168,7 @@ async def purge(self) -> None:

     @override
     async def drop(self) -> None:
-        async with self._lock:
-            await self._api_client.delete()
+        await self._api_client.delete()

     @override
     async def add_batch_of_requests(
@@ -632,7 +625,7 @@ def _cache_request(
         """Cache a request for future use.

         Args:
-            cache_key: The key to use for caching the request.
+            cache_key: The key to use for caching the request. It should be the request ID.
             processed_request: The processed request information.
             forefront: Whether the request was added to the forefront of the queue.
             hydrated_request: The hydrated request object, if available.
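
For context, here is a minimal sketch of the caching convention the updated docstrings describe: the client keeps an LRU cache keyed by request ID. Only the `LRUCache(maxsize=...)` call, the `CachedRequest` name, and the ID-as-key convention come from the diff itself; the `cachetools` import, the `CachedRequest` fields, the cache size, and the helper function below are illustrative assumptions, not the SDK's actual implementation.

from __future__ import annotations

from dataclasses import dataclass

from cachetools import LRUCache  # assumed backing implementation of LRUCache


@dataclass
class CachedRequest:
    """Hypothetical stand-in for the SDK's CachedRequest; the real class may differ."""

    id: str
    was_already_handled: bool
    forefront: bool


MAX_CACHED_REQUESTS = 1_000  # placeholder for _MAX_CACHED_REQUESTS; actual value may differ

# The cache maps request ID -> cached request, matching the updated docstrings.
requests_cache: LRUCache[str, CachedRequest] = LRUCache(maxsize=MAX_CACHED_REQUESTS)


def cache_request(cache_key: str, request: CachedRequest) -> None:
    """Store a request under its ID so later lookups avoid an extra API round trip."""
    requests_cache[cache_key] = request


# Usage: the request's own ID is the cache key.
req = CachedRequest(id="rq-123", was_already_handled=False, forefront=True)
cache_request(req.id, req)
assert requests_cache["rq-123"] is req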