@@ -239,8 +239,10 @@ async def add_batch_of_requests(
239239 Response containing information about the added requests.
240240 """
241241 # Do not try to add previously added requests to avoid pointless expensive calls to the API
242+
242243 new_requests : list [Request ] = []
243244 already_present_requests : list [dict [str , str | bool ]] = []
245+
244246 for request in requests :
245247 if self ._requests_cache .get (request .id ):
246248 # We are not sure if it was already handled at this point, and it is not worth calling the API for it.
@@ -254,12 +256,22 @@ async def add_batch_of_requests(
254256 )
255257
256258 else :
259+ # Add new request to the cache.
260+ processed_request = ProcessedRequest .model_validate (
261+ {
262+ 'id' : request .id ,
263+ 'uniqueKey' : request .unique_key ,
264+ 'wasAlreadyPresent' : True ,
265+ 'wasAlreadyHandled' : request .was_already_handled ,
266+ }
267+ )
268+ self ._cache_request (
269+ unique_key_to_request_id (request .unique_key ),
270+ processed_request ,
271+ forefront = False ,
272+ )
257273 new_requests .append (request )
258274
259- logger .debug (
260- f'Adding new requests: { len (new_requests )} , '
261- f'skipping already present requests: { len (already_present_requests )} '
262- )
263275 if new_requests :
264276 # Prepare requests for API by converting to dictionaries.
265277 requests_dict = [
@@ -272,19 +284,16 @@ async def add_batch_of_requests(
272284
273285 # Send requests to API.
274286 response = await self ._api_client .batch_add_requests (requests = requests_dict , forefront = forefront )
275- # Add new requests to the cache.
276- for processed_request_raw in response ['processedRequests' ]:
277- processed_request = ProcessedRequest .model_validate (processed_request_raw )
278- self ._cache_request (
279- unique_key_to_request_id (processed_request .unique_key ),
280- processed_request ,
281- forefront = False ,
282- )
283287 # Add the locally known already present processed requests based on the local cache.
284288 response ['processedRequests' ].extend (already_present_requests )
285289 else :
286290 response = {'unprocessedRequests' : [], 'processedRequests' : already_present_requests }
287291
292+ logger .debug (
293+ f'Added new requests: { len (new_requests )} , '
294+ f'skipped already present requests: { len (already_present_requests )} '
295+ )
296+
288297 # Update assumed total count for newly added requests.
289298 api_response = AddRequestsResponse .model_validate (response )
290299 new_request_count = 0
0 commit comments