@@ -245,20 +245,22 @@ async def add_batch_of_requests(
         # Do not try to add previously added requests to avoid pointless expensive calls to API

         new_requests: list[Request] = []
-        already_present_requests: list[dict[str, str | bool]] = []
+        already_present_requests: list[ProcessedRequest] = []

         for request in requests:
             if self._requests_cache.get(request.id):
                 # We are not sure if it was already handled at this point, and it is not worth calling API for it.
                 # It could have been handled by another client in the meantime, so cached information about
                 # `request.was_already_handled` is not reliable.
                 already_present_requests.append(
-                    {
-                        'id': request.id,
-                        'uniqueKey': request.unique_key,
-                        'wasAlreadyPresent': True,
-                        'wasAlreadyHandled': request.was_already_handled,
-                    }
+                    ProcessedRequest.model_validate(
+                        {
+                            'id': request.id,
+                            'uniqueKey': request.unique_key,
+                            'wasAlreadyPresent': True,
+                            'wasAlreadyHandled': request.was_already_handled,
+                        }
+                    )
                 )

             else:
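Note on the first hunk: `ProcessedRequest.model_validate` is pydantic-v2-style validation, so the camelCase keys built in the dict above end up as snake_case attributes on a typed object. Below is a minimal, self-contained sketch of that idea under the assumption that `ProcessedRequest` is a pydantic model with field aliases; the class name `ProcessedRequestSketch` and the exact field definitions are illustrative, not the library's actual model.

from pydantic import BaseModel, Field


class ProcessedRequestSketch(BaseModel):
    """Illustrative stand-in for the library's ProcessedRequest model (assumption)."""

    id: str
    unique_key: str = Field(alias='uniqueKey')
    was_already_present: bool = Field(alias='wasAlreadyPresent')
    was_already_handled: bool = Field(alias='wasAlreadyHandled')


# Same shape as the dict passed to model_validate in the hunk above.
processed = ProcessedRequestSketch.model_validate(
    {
        'id': 'abc123',
        'uniqueKey': 'https://example.com',
        'wasAlreadyPresent': True,
        'wasAlreadyHandled': False,
    }
)
assert processed.unique_key == 'https://example.com'
assert processed.was_already_present is True

Validating at append time means the cache-hit path stores the same typed objects as the API path, so downstream code can rely on attribute access rather than dict keys.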
@@ -288,25 +290,29 @@ async def add_batch_of_requests(
             ]

             # Send requests to API.
-            response = await self._api_client.batch_add_requests(requests=requests_dict, forefront=forefront)
+            api_response = AddRequestsResponse.model_validate(
+                await self._api_client.batch_add_requests(requests=requests_dict, forefront=forefront)
+            )
+
             # Add the locally known already present processed requests based on the local cache.
-            response['processedRequests'].extend(already_present_requests)
+            api_response.processed_requests.extend(already_present_requests)

             # Remove unprocessed requests from the cache
-            for unprocessed in response['unprocessedRequests']:
-                self._requests_cache.pop(unique_key_to_request_id(unprocessed['uniqueKey']), None)
+            for unprocessed_request in api_response.unprocessed_requests:
+                self._requests_cache.pop(unique_key_to_request_id(unprocessed_request.unique_key), None)

         else:
-            response = {'unprocessedRequests': [], 'processedRequests': already_present_requests}
+            api_response = AddRequestsResponse.model_validate(
+                {'unprocessedRequests': [], 'processedRequests': already_present_requests}
+            )

         logger.debug(
             f'Tried to add new requests: {len(new_requests)}, '
-            f'succeeded to add new requests: {len(response["processedRequests"]) - len(already_present_requests)}, '
+            f'succeeded to add new requests: {len(api_response.processed_requests) - len(already_present_requests)}, '
             f'skipped already present requests: {len(already_present_requests)}'
         )

         # Update assumed total count for newly added requests.
-        api_response = AddRequestsResponse.model_validate(response)
         new_request_count = 0
         for processed_request in api_response.processed_requests:
             if not processed_request.was_already_present and not processed_request.was_already_handled:
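Note on the second hunk: wrapping both branches in `AddRequestsResponse.model_validate` means the logging and counting code below always sees one typed object, and the late `model_validate(response)` call can be dropped. A rough, self-contained sketch of that shape, again assuming pydantic v2 with camelCase aliases; the `*Sketch` classes and their fields are illustrative, only the keys shown in the diff are taken from the source.

from pydantic import BaseModel, Field


class ProcessedSketch(BaseModel):
    # Only the fields the surrounding method actually reads are sketched here.
    was_already_present: bool = Field(alias='wasAlreadyPresent')
    was_already_handled: bool = Field(alias='wasAlreadyHandled')


class UnprocessedSketch(BaseModel):
    unique_key: str = Field(alias='uniqueKey')


class AddRequestsResponseSketch(BaseModel):
    """Illustrative stand-in for the library's AddRequestsResponse model (assumption)."""

    processed_requests: list[ProcessedSketch] = Field(alias='processedRequests')
    unprocessed_requests: list[UnprocessedSketch] = Field(alias='unprocessedRequests')


# Mirrors the cache-only branch: no API call is made, so 'unprocessedRequests' stays
# empty and 'processedRequests' carries only the locally known entries.
api_response = AddRequestsResponseSketch.model_validate(
    {
        'unprocessedRequests': [],
        'processedRequests': [{'wasAlreadyPresent': True, 'wasAlreadyHandled': False}],
    }
)
assert api_response.unprocessed_requests == []
assert api_response.processed_requests[0].was_already_present

With this shape, the else branch only has to supply an empty unprocessed list plus the cached entries, and the later `for processed_request in api_response.processed_requests` loop works unchanged in both branches.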