@@ -239,8 +239,10 @@ async def add_batch_of_requests(
             Response containing information about the added requests.
         """
         # Do not try to add previously added requests to avoid pointless expensive calls to API
+
         new_requests: list[Request] = []
         already_present_requests: list[dict[str, str | bool]] = []
+
         for request in requests:
             if self._requests_cache.get(request.id):
                 # We are not sure if it was already handled at this point, and it is not worth calling API for it.
@@ -254,12 +256,22 @@ async def add_batch_of_requests(
                 )

             else:
+                # Add new request to the cache.
+                processed_request = ProcessedRequest.model_validate(
+                    {
+                        'id': request.id,
+                        'uniqueKey': request.unique_key,
+                        'wasAlreadyPresent': True,
+                        'wasAlreadyHandled': request.was_already_handled,
+                    }
+                )
+                self._cache_request(
+                    unique_key_to_request_id(request.unique_key),
+                    processed_request,
+                    forefront=False,
+                )
                 new_requests.append(request)

-        logger.debug(
-            f'Adding new requests: {len(new_requests)}, '
-            f'skipping already present requests: {len(already_present_requests)}'
-        )
         if new_requests:
             # Prepare requests for API by converting to dictionaries.
             requests_dict = [
@@ -272,19 +284,16 @@ async def add_batch_of_requests(

             # Send requests to API.
             response = await self._api_client.batch_add_requests(requests=requests_dict, forefront=forefront)
-            # Add new requests to the cache.
-            for processed_request_raw in response['processedRequests']:
-                processed_request = ProcessedRequest.model_validate(processed_request_raw)
-                self._cache_request(
-                    unique_key_to_request_id(processed_request.unique_key),
-                    processed_request,
-                    forefront=False,
-                )
             # Add the locally known already present processed requests based on the local cache.
             response['processedRequests'].extend(already_present_requests)
         else:
             response = {'unprocessedRequests': [], 'processedRequests': already_present_requests}

+        logger.debug(
+            f'Added new requests: {len(new_requests)}, '
+            f'skipped already present requests: {len(already_present_requests)}'
+        )
+
         # Update assumed total count for newly added requests.
         api_response = AddRequestsResponse.model_validate(response)
         new_request_count = 0
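For context, the change moves local caching ahead of the API round-trip: requests already known to the in-memory cache are skipped and reported from the cache, new requests are cached optimistically, and only the new ones are sent in the batch call before the locally known results are merged into the response. The standalone sketch below mirrors that flow under simplified assumptions; FakeRequest, fake_batch_add_requests, and the plain dict cache are illustrative stand-ins, not the actual crawlee / apify-client API.

# Minimal, self-contained sketch of the dedup-and-cache-first flow shown in the diff.
# All names here (FakeRequest, fake_batch_add_requests, the cache dict) are assumptions
# made for illustration, not the real client types.
import asyncio
from dataclasses import dataclass


@dataclass
class FakeRequest:
    id: str
    unique_key: str
    was_already_handled: bool = False


async def fake_batch_add_requests(requests: list[dict]) -> dict:
    # Pretend the API accepted every submitted request as new.
    return {
        'unprocessedRequests': [],
        'processedRequests': [
            {'id': r['id'], 'uniqueKey': r['uniqueKey'], 'wasAlreadyPresent': False, 'wasAlreadyHandled': False}
            for r in requests
        ],
    }


async def add_batch(requests: list[FakeRequest], cache: dict[str, dict]) -> dict:
    new_requests: list[FakeRequest] = []
    already_present: list[dict] = []

    for request in requests:
        if request.id in cache:
            # Known locally: skip the API call and answer from the cache.
            already_present.append(cache[request.id])
        else:
            # New: cache it optimistically, then queue it for the batch call.
            cache[request.id] = {
                'id': request.id,
                'uniqueKey': request.unique_key,
                'wasAlreadyPresent': True,
                'wasAlreadyHandled': request.was_already_handled,
            }
            new_requests.append(request)

    if new_requests:
        payload = [{'id': r.id, 'uniqueKey': r.unique_key} for r in new_requests]
        response = await fake_batch_add_requests(payload)
        # Merge the locally known already-present requests into the response.
        response['processedRequests'].extend(already_present)
    else:
        response = {'unprocessedRequests': [], 'processedRequests': already_present}

    print(f'Added new requests: {len(new_requests)}, skipped already present requests: {len(already_present)}')
    return response


async def main() -> None:
    cache: dict[str, dict] = {}
    batch = [FakeRequest(id='1', unique_key='https://a'), FakeRequest(id='2', unique_key='https://b')]
    await add_batch(batch, cache)  # both sent to the "API"
    await add_batch(batch, cache)  # both served from the local cache, no API call


asyncio.run(main())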