@@ -1,6 +1,8 @@
 #include "internal.h"
 #include <stdatomic.h>
 #include <limits.h>
+#include <stdint.h>
+#include <stdio.h>
 #include <stdlib.h>
 #include <unistd.h>
 
@@ -9,7 +11,10 @@ static inline void free_stack_lock(struct SwiftNetMemoryAllocatorStack* const st
 }
 
 struct SwiftNetMemoryAllocatorStack* const find_free_pointer_stack(const struct SwiftNetMemoryAllocator* const allocator) {
+    printf("starting\n");
+
     for (struct SwiftNetMemoryAllocatorStack* current_stack = atomic_load(&allocator->data.first_item); current_stack != NULL; current_stack = atomic_load_explicit(&current_stack->next, memory_order_acquire)) {
+        printf("Got thing\n");
         uint8_t thread_none = ALLOCATOR_STACK_FREE;
 
         if (!atomic_compare_exchange_strong_explicit(
@@ -22,6 +27,8 @@ struct SwiftNetMemoryAllocatorStack* const find_free_pointer_stack(const struct
             continue;
         }
 
+        printf("%u %u\n", atomic_load_explicit(&current_stack->size, memory_order_acquire), allocator->chunk_item_amount);
+
         if (atomic_load(&current_stack->size) < allocator->chunk_item_amount) {
             return current_stack;
         } else {
@@ -91,6 +98,11 @@ struct SwiftNetMemoryAllocator allocator_create(const uint32_t item_size, const
 
     atomic_store_explicit(&new_allocator.creating_stack, STACK_CREATING_UNLOCKED, memory_order_release);
 
+#ifdef SWIFT_NET_DEBUG
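+    /* Debug builds keep a one-bit-per-item bitmap: a set bit marks a slot that is
+     * currently handed out. accessing_ptr_status is a spinlock flag guarding it. */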
+    atomic_store_explicit(&first_stack->accessing_ptr_status, false, memory_order_release);
+    first_stack->ptr_status = calloc((chunk_item_amount / 8) + 1, sizeof(uint8_t));
+#endif
+
     return new_allocator;
 }
 
@@ -124,14 +136,19 @@ static void create_new_stack(struct SwiftNetMemoryAllocator* const memory_alloca
     stack->pointers = allocated_memory_pointers;
     stack->data = allocated_memory;
     stack->size = chunk_item_amount;
-    stack->previous = atomic_load(&memory_allocator->data.last_item);
+    stack->previous = atomic_load_explicit(&memory_allocator->data.last_item, memory_order_acquire);
     stack->next = NULL;
-    stack->owner = ALLOCATOR_STACK_FREE;
+    atomic_store_explicit(&stack->owner, ALLOCATOR_STACK_FREE, memory_order_release);
 
     for (uint32_t i = 0; i < chunk_item_amount; i++) {
         ((void**)allocated_memory_pointers)[i] = (uint8_t*)allocated_memory + (i * item_size);
     }
 
+#ifdef SWIFT_NET_DEBUG
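+    /* Same debug bitmap setup as in allocator_create, for the freshly created stack. */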
+    atomic_store_explicit(&stack->accessing_ptr_status, false, memory_order_release);
+    stack->ptr_status = calloc((chunk_item_amount / 8) + 1, sizeof(uint8_t));
+#endif
+
     atomic_store_explicit(&((struct SwiftNetMemoryAllocatorStack*)atomic_load(&memory_allocator->data.last_item))->next, stack, memory_order_release);
     atomic_store_explicit(&memory_allocator->data.last_item, stack, memory_order_release);
     atomic_store_explicit(&memory_allocator->creating_stack, STACK_CREATING_UNLOCKED, memory_order_release);
@@ -153,20 +170,72 @@ void* allocator_allocate(struct SwiftNetMemoryAllocator* const memory_allocator)
 
     void* item_ptr = *ptr_to_data;
 
+#ifdef SWIFT_NET_DEBUG
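+    /* Mark the slot being handed out as live in the debug bitmap. The CAS loop
+     * below is a simple spinlock on accessing_ptr_status; the bitmap is only
+     * touched while the flag is held. */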
+    const uint32_t offset = (uint32_t)((uint8_t*)item_ptr - (uint8_t*)valid_stack->data);
+    const uint32_t index = offset / memory_allocator->item_size;
+
+    const uint32_t byte = index / 8;
+    const uint8_t bit = index % 8;
+
+    bool target = false;
+
+    while (!atomic_compare_exchange_strong_explicit(
+        &valid_stack->accessing_ptr_status,
+        &target,
+        true,
+        memory_order_acquire,
+        memory_order_relaxed
+    )) {
+        target = false;
+    }
+
+    *(valid_stack->ptr_status + byte) |= (1u << bit);
+
+    atomic_store_explicit(&valid_stack->accessing_ptr_status, false, memory_order_release);
+#endif
+
     free_stack_lock(valid_stack);
 
     return item_ptr;
 }
 
 #ifdef SWIFT_NET_DEBUG
     static inline bool is_already_free(struct SwiftNetMemoryAllocator* const memory_allocator, void* const memory_location) {
-        /*for (struct SwiftNetMemoryAllocatorStack* restrict stack = memory_allocator->free_memory_pointers.first_item; stack != NULL; stack = stack->next) {
-            for (uint32_t i = 0; i < stack->size; i++) {
-                if (*(((void**)stack->data) + i) == memory_location) {
+        for (struct SwiftNetMemoryAllocatorStack* stack = atomic_load_explicit(&memory_allocator->data.first_item, memory_order_acquire); stack != NULL; stack = atomic_load_explicit(&stack->next, memory_order_acquire)) {
+            if (
+                memory_location >= stack->data
+                &&
+                (uint8_t*)memory_location < (uint8_t*)stack->data + (memory_allocator->item_size * memory_allocator->chunk_item_amount)
+            ) {
+                const uint32_t offset = (uint32_t)((uint8_t*)memory_location - (uint8_t*)stack->data);
+                const uint32_t index = offset / memory_allocator->item_size;
+
+                const uint32_t byte = index / 8;
+                const uint8_t bit = index % 8;
+
+                bool target = false;
+
+                while (!atomic_compare_exchange_strong_explicit(
+                    &stack->accessing_ptr_status,
+                    &target,
+                    true,
+                    memory_order_acquire,
+                    memory_order_relaxed
+                )) {
+                    target = false;
+                }
+
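+                /* A set bit means the slot is still allocated, so this pointer has
+                 * not been freed yet; a clear bit means it was already returned. */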
+                if (((*(stack->ptr_status + byte)) & (1u << bit)) != 0) {
+                    atomic_store_explicit(&stack->accessing_ptr_status, false, memory_order_release);
+
+                    return false;
+                } else {
+                    atomic_store_explicit(&stack->accessing_ptr_status, false, memory_order_release);
+
                     return true;
                 }
             }
-        }*/
+        }
 
         return false;
     }
@@ -195,6 +264,30 @@ void allocator_free(struct SwiftNetMemoryAllocator* const memory_allocator, void
 
     ((void**)free_stack->pointers)[size] = memory_location;
 
+#ifdef SWIFT_NET_DEBUG
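+    /* Clear this slot's bit in the debug bitmap so a second free of the same
+     * pointer can be detected by is_already_free. */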
+    const uint32_t offset = (uint32_t)((uint8_t*)memory_location - (uint8_t*)free_stack->data);
+    const uint32_t index = offset / memory_allocator->item_size;
+
+    const uint32_t byte = index / 8;
+    const uint8_t bit = index % 8;
+
+    bool target = false;
+
+    while (!atomic_compare_exchange_strong_explicit(
+        &free_stack->accessing_ptr_status,
+        &target,
+        true,
+        memory_order_acquire,
+        memory_order_relaxed
+    )) {
+        target = false;
+    }
+
+    *(free_stack->ptr_status + byte) &= ~(1u << bit);
+
+    atomic_store_explicit(&free_stack->accessing_ptr_status, false, memory_order_release);
+#endif
+
     free_stack_lock(free_stack);
 }
 
@@ -211,6 +304,10 @@ void allocator_destroy(struct SwiftNetMemoryAllocator* const memory_allocator) {
 
+#ifdef SWIFT_NET_DEBUG
+        /* Free the debug bitmap before the stack that owns it; freeing it after
+         * free(current_stack_pointers) would read freed memory. */
+        free(current_stack_pointers->ptr_status);
+#endif
+
         free(current_stack_pointers);
 
         current_stack_pointers = next_stack;
     }
 }