@@ -973,24 +973,23 @@ move_legacy_finalizer_reachable(struct collection_state *state);
 
 #ifdef GC_ENABLE_MARK_ALIVE
 
-static void
+static bool
 gc_mark_buffer_prime(struct gc_mark_args *args)
 {
-    for (;;) {
-        Py_ssize_t buf_used = args->enqueued - args->dequeued;
-        if (buf_used >= BUFFER_HI) {
-            // When priming, don't fill the buffer since that would
-            // likely cause the stack to be used shortly after when it
-            // fills. We want to use the buffer as much as possible and
-            // so we only fill to BUFFER_HI, not BUFFER_SIZE.
-            return;
-        }
+    // When priming, don't fill the buffer since that would
+    // likely cause the stack to be used shortly after when it
+    // fills. We want to use the buffer as much as possible and
+    // so we only fill to BUFFER_HI, not BUFFER_SIZE.
+    Py_ssize_t space = BUFFER_HI - (args->enqueued - args->dequeued);
+    while (space > 0) {
         PyObject *op = _PyObjectStack_Pop(&args->stack);
         if (op == NULL) {
-            break;
+            return false; // no items left
         }
         gc_mark_buffer_push(op, args);
+        space--;
     }
+    return true;
 }
 
 static int
@@ -1004,8 +1003,7 @@ gc_propagate_alive(struct gc_mark_args *args)
             // the prefetch vs when the object is actually accessed.
             // Prime the buffer with object pointers from the stack,
             // if there are any available.
-            gc_mark_buffer_prime(args);
-            if (args->enqueued == args->dequeued) {
+            if (!gc_mark_buffer_prime(args)) {
                 return 0; // stack and buffer are both empty
             }
         }
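
For readers following along outside the CPython tree, here is a minimal, self-contained sketch of the pattern this change moves to: prime a ring buffer from a pending stack only up to a high-water mark, and have the priming helper report whether the stack still had items, so the caller can bail out without re-checking the fill level itself. All names here (`toy_args`, `toy_buffer_prime`, `BUF_HI`, etc.) are hypothetical stand-ins for illustration, not the actual `gc_free_threading.c` types or helpers.

```c
/* Illustrative sketch only -- not the CPython implementation.  It mimics
 * the shape of the change above: a fixed-size ring buffer is primed from
 * a pending stack up to a high-water mark (BUF_HI), and the priming
 * helper returns false once the stack runs dry. */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

#define BUF_SIZE 16   /* hypothetical buffer capacity */
#define BUF_HI    8   /* hypothetical high-water mark for priming */

struct toy_args {
    int buffer[BUF_SIZE];
    size_t enqueued;    /* total items pushed into the buffer */
    size_t dequeued;    /* total items popped from the buffer */
    int stack[64];      /* stand-in for the object stack */
    size_t stack_top;
};

static bool
toy_stack_pop(struct toy_args *args, int *out)
{
    if (args->stack_top == 0) {
        return false;   /* stack is empty */
    }
    *out = args->stack[--args->stack_top];
    return true;
}

static void
toy_buffer_push(struct toy_args *args, int v)
{
    args->buffer[args->enqueued % BUF_SIZE] = v;
    args->enqueued++;
}

/* Same shape as the new gc_mark_buffer_prime(): fill only to BUF_HI and
 * report whether the stack still had items when priming stopped. */
static bool
toy_buffer_prime(struct toy_args *args)
{
    ptrdiff_t space = BUF_HI - (ptrdiff_t)(args->enqueued - args->dequeued);
    while (space > 0) {
        int v;
        if (!toy_stack_pop(args, &v)) {
            return false;   /* no items left to prime with */
        }
        toy_buffer_push(args, v);
        space--;
    }
    return true;            /* buffer primed up to the high-water mark */
}

int
main(void)
{
    struct toy_args args = {0};
    for (int i = 0; i < 5; i++) {   /* fewer items than BUF_HI */
        args.stack[args.stack_top++] = i;
    }
    bool primed = toy_buffer_prime(&args);
    printf("primed=%d buffered=%zu\n", primed, args.enqueued - args.dequeued);
    /* prints: primed=0 buffered=5 -- stack exhausted before BUF_HI */
    return 0;
}
```

The payoff mirrored here is the caller-side simplification in the second hunk: the boolean return folds the "did priming leave anything to process" check into the call itself, instead of comparing `enqueued` and `dequeued` again after the fact.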