@@ -1010,7 +1010,13 @@ STATIC_INLINE jl_value_t *jl_gc_big_alloc_inner(jl_ptls_t ptls, size_t sz)
                             jl_atomic_load_relaxed(&ptls->gc_num.allocd) + allocsz);
     jl_atomic_store_relaxed(&ptls->gc_num.bigalloc,
                             jl_atomic_load_relaxed(&ptls->gc_num.bigalloc) + 1);
-    jl_atomic_fetch_add_relaxed(&gc_heap_stats.heap_size, allocsz);
+    uint64_t alloc_acc = jl_atomic_load_relaxed(&ptls->gc_num.alloc_acc);
+    if (alloc_acc + allocsz < 16 * 1024)
+        jl_atomic_store_relaxed(&ptls->gc_num.alloc_acc, alloc_acc + allocsz);
+    else {
+        jl_atomic_fetch_add_relaxed(&gc_heap_stats.heap_size, alloc_acc + allocsz);
+        jl_atomic_store_relaxed(&ptls->gc_num.alloc_acc, 0);
+    }
 #ifdef MEMDEBUG
     memset(v, 0xee, allocsz);
 #endif
@@ -1117,7 +1123,13 @@ void jl_gc_count_allocd(size_t sz) JL_NOTSAFEPOINT
     jl_ptls_t ptls = jl_current_task->ptls;
     jl_atomic_store_relaxed(&ptls->gc_num.allocd,
                             jl_atomic_load_relaxed(&ptls->gc_num.allocd) + sz);
-    jl_atomic_fetch_add_relaxed(&gc_heap_stats.heap_size, sz);
+    uint64_t alloc_acc = jl_atomic_load_relaxed(&ptls->gc_num.alloc_acc);
+    if (alloc_acc + sz < 16 * 1024)
+        jl_atomic_store_relaxed(&ptls->gc_num.alloc_acc, alloc_acc + sz);
+    else {
+        jl_atomic_fetch_add_relaxed(&gc_heap_stats.heap_size, alloc_acc + sz);
+        jl_atomic_store_relaxed(&ptls->gc_num.alloc_acc, 0);
+    }
 }

 static void combine_thread_gc_counts(jl_gc_num_t *dest) JL_NOTSAFEPOINT
@@ -3739,7 +3751,13 @@ JL_DLLEXPORT void *jl_gc_managed_malloc(size_t sz)
                             jl_atomic_load_relaxed(&ptls->gc_num.allocd) + allocsz);
     jl_atomic_store_relaxed(&ptls->gc_num.malloc,
                             jl_atomic_load_relaxed(&ptls->gc_num.malloc) + 1);
-    jl_atomic_fetch_add_relaxed(&gc_heap_stats.heap_size, allocsz);
+    uint64_t alloc_acc = jl_atomic_load_relaxed(&ptls->gc_num.alloc_acc);
+    if (alloc_acc + allocsz < 16 * 1024)
+        jl_atomic_store_relaxed(&ptls->gc_num.alloc_acc, alloc_acc + allocsz);
+    else {
+        jl_atomic_fetch_add_relaxed(&gc_heap_stats.heap_size, alloc_acc + allocsz);
+        jl_atomic_store_relaxed(&ptls->gc_num.alloc_acc, 0);
+    }
     int last_errno = errno;
 #ifdef _OS_WINDOWS_
     DWORD last_error = GetLastError();
0 commit comments