@@ -68,6 +68,13 @@ void CLR_RT_GarbageCollector::Heap_Compact()
 
     //--//
 
+#if defined(NANOCLR_TRACE_MEMORY_STATS)
+    if (s_CLR_RT_fTrace_MemoryStats >= c_CLR_RT_Trace_Verbose)
+    {
+        CLR_Debug::Printf("\r\nGC: Heap relocate prepare\r\n");
+    }
+#endif
+
     TestPointers_PopulateOld();
 
     CLR_RT_HeapCluster *freeRegion_hc = NULL;
@@ -86,21 +93,31 @@ void CLR_RT_GarbageCollector::Heap_Compact()
             // Move to the next first free region.
             //
             freeRegion_hc = (CLR_RT_HeapCluster *)g_CLR_RT_ExecutionEngine.m_heap.FirstNode();
+
             while (true)
             {
                 CLR_RT_HeapCluster *freeRegion_hcNext = (CLR_RT_HeapCluster *)freeRegion_hc->Next();
+
                 if (!freeRegion_hcNext)
+                {
                     break;
+                }
 
                 freeRegion = freeRegion_hc->m_freeList.FirstNode();
+
                 if (freeRegion->Next())
+                {
                     break;
+                }
 
                 freeRegion = NULL;
                 freeRegion_hc = freeRegion_hcNext;
             }
+
             if (!freeRegion)
+            {
                 break;
+            }
         }
 
         while (true)
         {
@@ -121,13 +138,15 @@ void CLR_RT_GarbageCollector::Heap_Compact()
         }
 
         if (currentSource == currentSource_end)
+        {
             break;
+        }
 
-            ////////////////////////////////////////////////////
-            //
-            // At this point, we have at least ONE movable block.
-            //
-            ////////////////////////////////////////////////////
+        ////////////////////////////////////////////////////
+        //
+        // At this point, we have at least ONE movable block.
+        //
+        ////////////////////////////////////////////////////
 
 #if NANOCLR_VALIDATE_HEAP >= NANOCLR_VALIDATE_HEAP_4_CompactionPlus
         if (IsBlockInFreeList(g_CLR_RT_ExecutionEngine.m_heap, freeRegion, true) == false)
@@ -320,6 +339,14 @@ void CLR_RT_GarbageCollector::Heap_Compact()
 void CLR_RT_GarbageCollector::Heap_Relocate_Prepare(RelocationRegion *blocks, size_t total)
 {
     NATIVE_PROFILE_CLR_CORE();
+
+#if defined(NANOCLR_TRACE_MEMORY_STATS)
+    if (s_CLR_RT_fTrace_MemoryStats >= c_CLR_RT_Trace_Verbose)
+    {
+        CLR_Debug::Printf("\r\nGC: Relocation - prepare\r\n");
+    }
+#endif
+
     m_relocBlocks = blocks;
     m_relocTotal = total;
     m_relocCount = 0;
@@ -368,6 +395,14 @@ void CLR_RT_GarbageCollector::Heap_Relocate_AddBlock(CLR_UINT8 *dst, CLR_UINT8 *
 void CLR_RT_GarbageCollector::Heap_Relocate()
 {
     NATIVE_PROFILE_CLR_CORE();
+
+#if defined(NANOCLR_TRACE_MEMORY_STATS)
+    if (s_CLR_RT_fTrace_MemoryStats >= c_CLR_RT_Trace_Verbose)
+    {
+        CLR_Debug::Printf("\r\nGC: Relocating Heap\r\n");
+    }
+#endif
+
     if (m_relocCount)
     {
         RelocationRegion *relocBlocks = m_relocBlocks;
@@ -417,13 +452,30 @@ void CLR_RT_GarbageCollector::Heap_Relocate_Pass(RelocateFtn ftn)
     (void)ftn;
 #endif
 
+#if defined(NANOCLR_TRACE_MEMORY_STATS)
+    if (s_CLR_RT_fTrace_MemoryStats >= c_CLR_RT_Trace_Verbose)
+    {
+        CLR_Debug::Printf("\r\nGC: Relocation - pass\r\n");
+    }
+#endif
+
     NANOCLR_FOREACH_NODE(CLR_RT_HeapCluster, hc, g_CLR_RT_ExecutionEngine.m_heap)
     {
         CLR_RT_HeapBlock_Node *ptr = hc->m_payloadStart;
         CLR_RT_HeapBlock_Node *end = hc->m_payloadEnd;
 
+        // check pointers
+        _ASSERTE(ptr >= (void *)s_CLR_RT_Heap.m_location);
+        _ASSERTE(ptr < (void *)(s_CLR_RT_Heap.m_location + s_CLR_RT_Heap.m_size));
+        _ASSERTE(end >= (void *)s_CLR_RT_Heap.m_location);
+        _ASSERTE(end <= (void *)(s_CLR_RT_Heap.m_location + s_CLR_RT_Heap.m_size));
+
         while (ptr < end)
         {
+            // check pointer
+            _ASSERTE(ptr >= (void *)s_CLR_RT_Heap.m_location);
+            _ASSERTE(ptr < (void *)(s_CLR_RT_Heap.m_location + s_CLR_RT_Heap.m_size));
+
             CLR_RT_HEAPBLOCK_RELOCATE(ptr);
 
             ptr += ptr->DataSize();
@@ -439,6 +491,14 @@ void CLR_RT_GarbageCollector::Heap_Relocate_Pass(RelocateFtn ftn)
 void CLR_RT_GarbageCollector::Heap_Relocate(CLR_RT_HeapBlock *lst, CLR_UINT32 len)
 {
     NATIVE_PROFILE_CLR_CORE();
+
+#if defined(NANOCLR_TRACE_MEMORY_STATS)
+    if (s_CLR_RT_fTrace_MemoryStats >= c_CLR_RT_Trace_Verbose)
+    {
+        CLR_Debug::Printf("\r\nGC: Relocating\r\n");
+    }
+#endif
+
     while (len--)
     {
         CLR_RT_HEAPBLOCK_RELOCATE(lst);
@@ -457,11 +517,27 @@ void CLR_RT_GarbageCollector::Heap_Relocate(void **ref)
 #if NANOCLR_VALIDATE_HEAP > NANOCLR_VALIDATE_HEAP_0_None
     if (g_CLR_RT_GarbageCollector.m_relocWorker)
     {
+
+#if defined(NANOCLR_TRACE_MEMORY_STATS)
+        if (s_CLR_RT_fTrace_MemoryStats >= c_CLR_RT_Trace_Verbose)
+        {
+            CLR_Debug::Printf("\r\nGC: Relocating with worker\r\n");
+        }
+#endif
+
         g_CLR_RT_GarbageCollector.m_relocWorker(ref);
     }
     else
 #endif
     {
+
+#if defined(NANOCLR_TRACE_MEMORY_STATS)
+        if (s_CLR_RT_fTrace_MemoryStats >= c_CLR_RT_Trace_Verbose)
+        {
+            CLR_Debug::Printf("\r\nGC: Relocating Heap\r\n");
+        }
+#endif
+
         if (dst >= g_CLR_RT_GarbageCollector.m_relocMinimum && dst < g_CLR_RT_GarbageCollector.m_relocMaximum)
         {
             RelocationRegion *relocBlocks = g_CLR_RT_GarbageCollector.m_relocBlocks;
@@ -503,6 +579,13 @@ bool CLR_RT_GarbageCollector::Relocation_JustCheck(void **ref)
     NATIVE_PROFILE_CLR_CORE();
     CLR_UINT8 *dst = (CLR_UINT8 *)*ref;
 
+#if defined(NANOCLR_TRACE_MEMORY_STATS)
+    if (s_CLR_RT_fTrace_MemoryStats >= c_CLR_RT_Trace_Verbose)
+    {
+        CLR_Debug::Printf("\r\nGC: Relocation - just checking\r\n");
+    }
+#endif
+
     if (dst)
     {
         ValidateBlockNotInFreeList(g_CLR_RT_ExecutionEngine.m_heap, (CLR_RT_HeapBlock_Node *)dst);
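
Taken together, the hunks above repeat two small patterns: each new CLR_Debug::Printf call is compiled in only when NANOCLR_TRACE_MEMORY_STATS is defined and fires only when s_CLR_RT_fTrace_MemoryStats is at least c_CLR_RT_Trace_Verbose, and Heap_Relocate_Pass now asserts that every pointer it walks stays inside the managed heap described by s_CLR_RT_Heap. The standalone C++ sketch below illustrates both patterns in isolation; the trace-level enum, heap descriptor, step size, and helper names are simplified stand-ins chosen for illustration, not the real nanoCLR definitions.

// Standalone sketch of the two patterns introduced by this diff.
// All names here (TraceLevel, g_traceMemoryStats, HeapInfo, ...) are
// illustrative stand-ins, not the actual nanoCLR symbols.
#include <cassert>
#include <cstdint>
#include <cstdio>

enum TraceLevel
{
    Trace_None = 0,
    Trace_Info = 1,
    Trace_Verbose = 2
};

// Stand-in for s_CLR_RT_fTrace_MemoryStats.
static TraceLevel g_traceMemoryStats = Trace_Verbose;

// Stand-in for s_CLR_RT_Heap (base address and size of the managed heap).
struct HeapInfo
{
    uint8_t *m_location;
    size_t m_size;
};

static uint8_t g_storage[1024];
static HeapInfo g_heap = {g_storage, sizeof(g_storage)};

// Pattern 1: emit a GC trace message only at verbose level, mirroring the
// gated CLR_Debug::Printf calls added throughout the diff.
static void TraceVerbose(const char *message)
{
    if (g_traceMemoryStats >= Trace_Verbose)
    {
        printf("\r\n%s\r\n", message);
    }
}

// Pattern 2: assert that a pointer being walked stays inside the heap,
// mirroring the _ASSERTE range checks added to Heap_Relocate_Pass.
static void AssertInsideHeap(const void *ptr)
{
    const uint8_t *p = static_cast<const uint8_t *>(ptr);
    assert(p >= g_heap.m_location);
    assert(p < g_heap.m_location + g_heap.m_size);
}

int main()
{
    TraceVerbose("GC: Relocating Heap");

    // Walk the heap in fixed-size steps (a stand-in for ptr->DataSize()),
    // validating each pointer before it is used.
    for (uint8_t *ptr = g_heap.m_location; ptr < g_heap.m_location + g_heap.m_size; ptr += 64)
    {
        AssertInsideHeap(ptr);
    }

    return 0;
}

In the real code the gate is the preprocessor check on NANOCLR_TRACE_MEMORY_STATS plus the runtime trace-level comparison, so builds without the define carry none of the extra trace code.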