@@ -278,7 +278,7 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame)
     }
     if (!PyUnicode_IS_READY(filename)) {
         /* Don't make a Unicode string ready to avoid reentrant calls
-           to tracemalloc_malloc() or tracemalloc_realloc() */
+           to tracemalloc_alloc() or tracemalloc_realloc() */
 #ifdef TRACE_DEBUG
         tracemalloc_error("filename is not a ready unicode string");
 #endif
@@ -526,7 +526,8 @@ tracemalloc_add_trace_unlocked(unsigned int domain, uintptr_t ptr,
 
 
 static void *
-tracemalloc_alloc(int use_calloc, void *ctx, size_t nelem, size_t elsize)
+tracemalloc_alloc(int need_gil, int use_calloc,
+                  void *ctx, size_t nelem, size_t elsize)
 {
     assert(elsize == 0 || nelem <= SIZE_MAX / elsize);
 
@@ -538,37 +539,71 @@ tracemalloc_alloc(int use_calloc, void *ctx, size_t nelem, size_t elsize)
     else {
         ptr = alloc->malloc(alloc->ctx, nelem * elsize);
     }
+
     if (ptr == NULL) {
         return NULL;
     }
+    if (get_reentrant()) {
+        return ptr;
+    }
+
+    // Ignore reentrant call.
+    //
+    // For example, PyObject_Malloc() calls PyMem_Malloc() for
+    // allocations larger than 512 bytes: don't trace the same
+    // memory allocation twice.
+    //
+    // If reentrant calls are not ignored, PyGILState_Ensure() can call
+    // PyMem_RawMalloc() which would call PyGILState_Ensure() again in a loop.
+    set_reentrant(1);

+    PyGILState_STATE gil_state;
+    if (need_gil) {
+        gil_state = PyGILState_Ensure();
+    }
     TABLES_LOCK();
+
     if (ADD_TRACE(ptr, nelem * elsize) < 0) {
-        /* Failed to allocate a trace for the new memory block */
+        // Failed to allocate a trace for the new memory block
         alloc->free(alloc->ctx, ptr);
         ptr = NULL;
-        goto done;
     }
 
-done:
     TABLES_UNLOCK();
+    if (need_gil) {
+        PyGILState_Release(gil_state);
+    }
+    set_reentrant(0);
     return ptr;
 }
 
 
 static void *
-tracemalloc_realloc(void *ctx, void *ptr, size_t new_size)
+tracemalloc_realloc(int need_gil, void *ctx, void *ptr, size_t new_size)
 {
     PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx;
     void *ptr2 = alloc->realloc(alloc->ctx, ptr, new_size);
+
     if (ptr2 == NULL) {
         return NULL;
     }
+    if (get_reentrant()) {
+        return ptr2;
+    }
 
+    // Ignore reentrant call. PyObject_Realloc() calls PyMem_Realloc() for
+    // allocations larger than 512 bytes: don't trace the same memory block
+    // twice.
+    set_reentrant(1);
+
+    PyGILState_STATE gil_state;
+    if (need_gil) {
+        gil_state = PyGILState_Ensure();
+    }
     TABLES_LOCK();
 
     if (ptr != NULL) {
-        /* an existing memory block has been resized */
+        // An existing memory block has been resized
 
         // tracemalloc_add_trace_unlocked() updates the trace if there is
         // already a trace at address ptr2.
@@ -577,32 +612,33 @@ tracemalloc_realloc(void *ctx, void *ptr, size_t new_size)
         }
 
         if (ADD_TRACE(ptr2, new_size) < 0) {
-            /* Memory allocation failed. The error cannot be reported to
-               the caller, because realloc() may already have shrunk the
-               memory block and so removed bytes.
-
-               This case is very unlikely: a hash entry has just been
-               released, so the hash table should have at least one free entry.
-
-               The GIL and the table lock ensures that only one thread is
-               allocating memory. */
+            // Memory allocation failed. The error cannot be reported to the
+            // caller, because realloc() may already have shrunk the memory
+            // block and so removed bytes.
+            //
+            // This case is very unlikely: a hash entry has just been released,
+            // so the hash table should have at least one free entry.
+            //
+            // The GIL and the table lock ensure that only one thread is
+            // allocating memory.
             Py_FatalError("tracemalloc_realloc() failed to allocate a trace");
         }
     }
     else {
-        /* new allocation */
+        // New allocation
 
         if (ADD_TRACE(ptr2, new_size) < 0) {
-            /* Failed to allocate a trace for the new memory block */
+            // Failed to allocate a trace for the new memory block
             alloc->free(alloc->ctx, ptr2);
             ptr2 = NULL;
-            goto done;
         }
     }
 
-done:
     TABLES_UNLOCK();
-
+    if (need_gil) {
+        PyGILState_Release(gil_state);
+    }
+    set_reentrant(0);
     return ptr2;
 }
 
@@ -627,124 +663,46 @@ tracemalloc_free(void *ctx, void *ptr)
 }
 
 
-static void *
-tracemalloc_alloc_gil(int use_calloc, void *ctx, size_t nelem, size_t elsize)
-{
-    if (get_reentrant()) {
-        PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx;
-        if (use_calloc)
-            return alloc->calloc(alloc->ctx, nelem, elsize);
-        else
-            return alloc->malloc(alloc->ctx, nelem * elsize);
-    }
-
-    /* Ignore reentrant call. PyObjet_Malloc() calls PyMem_Malloc() for
-       allocations larger than 512 bytes, don't trace the same memory
-       allocation twice. */
-    set_reentrant(1);
-
-    void *ptr = tracemalloc_alloc(use_calloc, ctx, nelem, elsize);
-
-    set_reentrant(0);
-    return ptr;
-}
-
-
 static void *
 tracemalloc_malloc_gil(void *ctx, size_t size)
 {
-    return tracemalloc_alloc_gil(0, ctx, 1, size);
+    return tracemalloc_alloc(0, 0, ctx, 1, size);
 }
 
 
 static void *
 tracemalloc_calloc_gil(void *ctx, size_t nelem, size_t elsize)
 {
-    return tracemalloc_alloc_gil(1, ctx, nelem, elsize);
+    return tracemalloc_alloc(0, 1, ctx, nelem, elsize);
 }
 
 
 static void *
 tracemalloc_realloc_gil(void *ctx, void *ptr, size_t new_size)
 {
-    if (get_reentrant()) {
-        // Reentrant call to PyMem_Realloc() or PyObject_Realloc().
-        PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx;
-        return alloc->realloc(alloc->ctx, ptr, new_size);
-    }
-
-    /* Ignore reentrant call. PyObjet_Realloc() calls PyMem_Realloc() for
-       allocations larger than 512 bytes. Don't trace the same memory
-       allocation twice. */
-    set_reentrant(1);
-
-    void *ptr2 = tracemalloc_realloc(ctx, ptr, new_size);
-
-    set_reentrant(0);
-    return ptr2;
+    return tracemalloc_realloc(0, ctx, ptr, new_size);
 }
 
 
 #ifdef TRACE_RAW_MALLOC
-static void *
-tracemalloc_raw_alloc(int use_calloc, void *ctx, size_t nelem, size_t elsize)
-{
-    if (get_reentrant()) {
-        PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx;
-        if (use_calloc)
-            return alloc->calloc(alloc->ctx, nelem, elsize);
-        else
-            return alloc->malloc(alloc->ctx, nelem * elsize);
-    }
-
-    /* Ignore reentrant call. PyGILState_Ensure() may call PyMem_RawMalloc()
-       indirectly which would call PyGILState_Ensure() if reentrant are not
-       disabled. */
-    set_reentrant(1);
-
-    PyGILState_STATE gil_state = PyGILState_Ensure();
-    void *ptr = tracemalloc_alloc(use_calloc, ctx, nelem, elsize);
-    PyGILState_Release(gil_state);
-
-    set_reentrant(0);
-    return ptr;
-}
-
-
 static void *
 tracemalloc_raw_malloc(void *ctx, size_t size)
 {
-    return tracemalloc_raw_alloc(0, ctx, 1, size);
+    return tracemalloc_alloc(1, 0, ctx, 1, size);
 }
 
 
 static void *
 tracemalloc_raw_calloc(void *ctx, size_t nelem, size_t elsize)
 {
-    return tracemalloc_raw_alloc(1, ctx, nelem, elsize);
+    return tracemalloc_alloc(1, 1, ctx, nelem, elsize);
 }
 
 
 static void *
 tracemalloc_raw_realloc(void *ctx, void *ptr, size_t new_size)
 {
-    if (get_reentrant()) {
-        // Reentrant call to PyMem_RawRealloc().
-        PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx;
-        return alloc->realloc(alloc->ctx, ptr, new_size);
-    }
-
-    /* Ignore reentrant call. PyGILState_Ensure() may call PyMem_RawMalloc()
-       indirectly which would call PyGILState_Ensure() if reentrant calls are
-       not disabled. */
-    set_reentrant(1);
-
-    PyGILState_STATE gil_state = PyGILState_Ensure();
-    void *ptr2 = tracemalloc_realloc(ctx, ptr, new_size);
-    PyGILState_Release(gil_state);
-
-    set_reentrant(0);
-    return ptr2;
+    return tracemalloc_realloc(1, ctx, ptr, new_size);
 }
 #endif /* TRACE_RAW_MALLOC */
 
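For readers following the diff, the merged helper boils down to the pattern sketched below: a thread-local reentrancy flag prevents tracing nested allocations, and the GIL is acquired only when the caller passes need_gil (the raw-memory hooks, which may run without the GIL held). This is an illustrative sketch, not code from the patch: traced_malloc(), the reentrant flag and trace_block() are made-up stand-ins for tracemalloc_alloc(), get_reentrant()/set_reentrant() and the TABLES_LOCK()/ADD_TRACE()/TABLES_UNLOCK() sequence; only the PyGILState_Ensure()/PyGILState_Release() calls are the real CPython API.

/* Illustrative sketch only -- not part of the patch. */
#include <Python.h>
#include <stdlib.h>

static _Thread_local int reentrant;   /* stand-in for get_reentrant()/set_reentrant() */

static void
trace_block(void *ptr, size_t size)
{
    /* stand-in for TABLES_LOCK(); ADD_TRACE(ptr, size); TABLES_UNLOCK(); */
    (void)ptr;
    (void)size;
}

static void *
traced_malloc(int need_gil, size_t size)
{
    void *ptr = malloc(size);         /* the wrapped allocator */
    if (ptr == NULL) {
        return NULL;
    }
    if (reentrant) {
        /* Nested allocation made while tracing another block: return it untraced. */
        return ptr;
    }

    reentrant = 1;                    /* block reentrant traced allocations */

    PyGILState_STATE gil_state;
    if (need_gil) {
        /* Raw-domain hooks may be called without the GIL held. */
        gil_state = PyGILState_Ensure();
    }

    trace_block(ptr, size);

    if (need_gil) {
        PyGILState_Release(gil_state);
    }
    reentrant = 0;
    return ptr;
}

Hiding the GIL handling behind the need_gil flag is what lets the *_gil and *_raw wrappers above collapse into thin one-line calls instead of each duplicating the reentrancy logic.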