@@ -563,7 +563,11 @@ async def _list_head(
                 queue_has_locked_requests=self._queue_has_locked_requests,
                 lock_time=lock_time,
             )
+
+        leftover_buffer = list[str]()
         if self._should_check_for_forefront_requests:
+            leftover_buffer = list(self._queue_head)
+            self._queue_head.clear()
             self._should_check_for_forefront_requests = False
 
         # Otherwise fetch from API
@@ -578,9 +582,7 @@ async def _list_head(
         # Update the queue head cache
         self._queue_has_locked_requests = response.get('queueHasLockedRequests', False)
 
-        # Iterate over new requests and push them to the front of the queue.
-        # Since we push to the front of the queue, we have to iterate in reverse order to preserve the intended order.
-        for request_data in reversed(response.get('items', [])):
+        for request_data in response.get('items', []):
             request = Request.model_validate(request_data)
 
             # Skip requests without ID or unique key
@@ -606,8 +608,11 @@ async def _list_head(
                 hydrated_request=request,
             )
 
-            # All new requests are added to the forefront, existing leftover locked requests kept in the end.
-            self._queue_head.appendleft(request.id)
+            self._queue_head.append(request.id)
+
+        for leftover_request_id in leftover_buffer:
+            # After adding new requests to the forefront, any existing leftover locked request is kept at the end.
+            self._queue_head.append(leftover_request_id)
 
         return RequestQueueHead.model_validate(response)
 
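A minimal sketch of the reordering this diff introduces (not the library's actual class; rebuild_queue_head, its parameters, and the sample IDs below are hypothetical): when a forefront check is pending, the cached queue head is buffered and cleared, freshly fetched request IDs are appended in API order, and the buffered leftover (typically still-locked) IDs are re-appended at the end, which removes the need for reversed() and appendleft().

from collections import deque


def rebuild_queue_head(
    queue_head: deque[str],
    fetched_ids: list[str],
    *,
    should_check_for_forefront_requests: bool,
) -> deque[str]:
    """Rebuild the cached queue head: fresh IDs first, leftover IDs last."""
    leftover_buffer: list[str] = []
    if should_check_for_forefront_requests:
        # Buffer and clear the current head so new forefront items come first.
        leftover_buffer = list(queue_head)
        queue_head.clear()

    # Append new request IDs in the order the API returned them;
    # plain append() keeps that order without iterating in reverse.
    for request_id in fetched_ids:
        queue_head.append(request_id)

    # Leftover (e.g. still-locked) request IDs are kept at the end.
    for leftover_request_id in leftover_buffer:
        queue_head.append(leftover_request_id)

    return queue_head


head = deque(['a', 'b'])
rebuild_queue_head(head, ['c', 'd'], should_check_for_forefront_requests=True)
print(head)  # deque(['c', 'd', 'a', 'b'])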