@@ -10,11 +10,26 @@ static inline void free_stack_lock(struct SwiftNetMemoryAllocatorStack* const st
     atomic_store_explicit(&stack->owner, ALLOCATOR_STACK_FREE, memory_order_release);
 }
 
-struct SwiftNetMemoryAllocatorStack* const find_free_pointer_stack(const struct SwiftNetMemoryAllocator* const allocator) {
-    printf("starting\n");
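+// Release the spinlock that guards this stack's ptr_status bitmap.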
+static inline void unlock_ptr_status(struct SwiftNetMemoryAllocatorStack* const stack) {
+    atomic_store_explicit(&stack->accessing_ptr_status, false, memory_order_release);
+}
+
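+// Acquire the spinlock: spin until the CAS flips accessing_ptr_status from
+// false to true, re-arming the expected value after each failed attempt.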
+static inline void lock_ptr_status(struct SwiftNetMemoryAllocatorStack* const stack) {
+    bool target = false;
+
+    while (!atomic_compare_exchange_strong_explicit(
+        &stack->accessing_ptr_status,
+        &target,
+        true,
+        memory_order_acquire,
+        memory_order_relaxed
+    )) {
+        target = false;
+    }
+}
 
+struct SwiftNetMemoryAllocatorStack* const find_free_pointer_stack(const struct SwiftNetMemoryAllocator* const allocator) {
     for (struct SwiftNetMemoryAllocatorStack* current_stack = atomic_load(&allocator->data.first_item); current_stack != NULL; current_stack = atomic_load_explicit(&current_stack->next, memory_order_acquire)) {
-        printf("Got thing\n");
         uint8_t thread_none = ALLOCATOR_STACK_FREE;
 
         if (!atomic_compare_exchange_strong_explicit(
@@ -27,8 +42,6 @@ struct SwiftNetMemoryAllocatorStack* const find_free_pointer_stack(const struct
             continue;
         }
 
-        printf("%d %d\n", atomic_load_explicit(&current_stack->size, memory_order_acquire), allocator->chunk_item_amount);
-
         if (atomic_load(&current_stack->size) < allocator->chunk_item_amount) {
             return current_stack;
         } else {
@@ -177,21 +190,11 @@ void* allocator_allocate(struct SwiftNetMemoryAllocator* const memory_allocator)
     const uint32_t byte = index / 8;
     const uint8_t bit = index % 8;
 
-    bool target = false;
-
-    while (!atomic_compare_exchange_strong_explicit(
-        &valid_stack->accessing_ptr_status,
-        &target,
-        true,
-        memory_order_acquire,
-        memory_order_relaxed
-    )) {
-        target = false;
-    }
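+    // Take the ptr_status spinlock before setting this slot's "allocated" bit.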
+    lock_ptr_status(valid_stack);
 
     *(valid_stack->ptr_status + byte) |= (1u << bit);
 
-    atomic_store_explicit(&valid_stack->accessing_ptr_status, false, memory_order_release);
+    unlock_ptr_status(valid_stack);
 #endif
 
     free_stack_lock(valid_stack);
@@ -213,24 +216,14 @@ void* allocator_allocate(struct SwiftNetMemoryAllocator* const memory_allocator)
     const uint32_t byte = index / 8;
     const uint8_t bit = index % 8;
 
-    bool target = false;
-
-    while (!atomic_compare_exchange_strong_explicit(
-        &stack->accessing_ptr_status,
-        &target,
-        true,
-        memory_order_acquire,
-        memory_order_relaxed
-    )) {
-        target = false;
-    }
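+    // Read the slot's bit under the spinlock: set means still allocated.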
+    lock_ptr_status(stack);
 
     if (((*(stack->ptr_status + byte)) & (1u << bit)) != 0) {
-        atomic_store_explicit(&stack->accessing_ptr_status, false, memory_order_release);
+        unlock_ptr_status(stack);
 
         return false;
     } else {
-        atomic_store_explicit(&stack->accessing_ptr_status, false, memory_order_release);
+        unlock_ptr_status(stack);
 
         return true;
     }
@@ -271,43 +264,49 @@ void allocator_free(struct SwiftNetMemoryAllocator* const memory_allocator, void
     const uint32_t byte = index / 8;
     const uint8_t bit = index % 8;
 
-    bool target = false;
-
-    while (!atomic_compare_exchange_strong_explicit(
-        &free_stack->accessing_ptr_status,
-        &target,
-        true,
-        memory_order_acquire,
-        memory_order_relaxed
-    )) {
-        target = false;
-    }
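+    // Clear the slot's "allocated" bit while holding the spinlock.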
+    lock_ptr_status(free_stack);
 
     *(free_stack->ptr_status + byte) &= ~(1u << bit);
 
-    atomic_store_explicit(&free_stack->accessing_ptr_status, false, memory_order_release);
+    unlock_ptr_status(free_stack);
 #endif
 
     free_stack_lock(free_stack);
 }
 
 void allocator_destroy(struct SwiftNetMemoryAllocator* const memory_allocator) {
-    for (struct SwiftNetMemoryAllocatorStack* current_stack_pointers = atomic_load(&memory_allocator->data.first_item); ; ) {
-        free(current_stack_pointers->data);
-        free(current_stack_pointers->pointers);
-
-        struct SwiftNetMemoryAllocatorStack* const next_stack = atomic_load(&current_stack_pointers->next);
-        if (next_stack == NULL) {
-            free(current_stack_pointers);
-            break;
-        }
-
-        free(current_stack_pointers);
-
-#ifdef SWIFT_NET_DEBUG
-        free(current_stack_pointers->ptr_status);
-#endif
-
-        current_stack_pointers = next_stack;
+    for (struct SwiftNetMemoryAllocatorStack* current_stack = atomic_load(&memory_allocator->data.first_item); ; ) {
+#ifdef SWIFT_NET_DEBUG
+        // Scan the bitmap before any memory is released: a bit still set
+        // marks a slot that was allocated but never freed, i.e. a leak.
+        lock_ptr_status(current_stack);
+
+        for (uint32_t i = 0; i < (memory_allocator->chunk_item_amount / 8) + 1; i++) {
+            if (*(current_stack->ptr_status + i) != 0x00) {
+                for (uint8_t bit = 0; bit < 8; bit++) {
+                    if ((*(current_stack->ptr_status + i) & (1u << bit)) != 0x00) {
+                        bytes_leaked += memory_allocator->item_size;
+                        items_leaked++;
+                    }
+                }
+            }
+        }
+
+        unlock_ptr_status(current_stack);
+
+        free(current_stack->ptr_status);
+#endif
+
+        free(current_stack->data);
+        free(current_stack->pointers);
+
+        // Grab the next pointer before the node itself is freed.
+        struct SwiftNetMemoryAllocatorStack* const next_stack = atomic_load(&current_stack->next);
+        free(current_stack);
+
+        if (next_stack == NULL) {
+            break;
+        }
+
+        current_stack = next_stack;
     }
 }
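
Aside: the core pattern this commit factors out is a CAS spinlock with acquire/release ordering guarding a per-stack allocation bitmap. Below is a minimal, self-contained sketch of that same pattern in plain C11 — the names here (`bitmap_busy`, `slot_set`, `slot_clear`) are illustrative stand-ins, not SwiftNet API:

```c
#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

// Spinlock guarding a small allocation bitmap (one bit per slot).
static atomic_bool bitmap_busy = false;
static uint8_t bitmap[4]; // tracks 32 slots

static void bitmap_lock(void) {
    bool expected = false;
    // The CAS succeeds only when the flag is false; acquire ordering makes
    // the previous holder's bitmap writes visible to us.
    while (!atomic_compare_exchange_strong_explicit(
        &bitmap_busy, &expected, true,
        memory_order_acquire, memory_order_relaxed)) {
        expected = false; // CAS overwrote it with the observed value; re-arm
    }
}

static void bitmap_unlock(void) {
    // Release ordering publishes our bitmap writes to the next acquirer.
    atomic_store_explicit(&bitmap_busy, false, memory_order_release);
}

static void slot_set(uint32_t index) {
    bitmap_lock();
    bitmap[index / 8] |= (uint8_t)(1u << (index % 8));
    bitmap_unlock();
}

static void slot_clear(uint32_t index) {
    bitmap_lock();
    bitmap[index / 8] &= (uint8_t)~(1u << (index % 8));
    bitmap_unlock();
}

int main(void) {
    slot_set(11);
    slot_clear(11);
    printf("byte 1 after set+clear: 0x%02x\n", bitmap[1]); // prints 0x00
    return 0;
}
```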