@@ -27,9 +27,9 @@ pub const MAXIMUM_SMALL_WORDS: usize = 32;
27
27
/// The alignment of elements in the arena
28
28
pub const ARENA_ELEMENT_ALIGN : usize = std:: mem:: align_of :: < GcHeader > ( ) ;
29
29
30
- use crate :: layout:: { GcHeader } ;
30
+ use crate :: layout:: { GcHeader , UnknownHeader } ;
31
31
32
- [ inline]
32
+ # [ inline]
33
33
pub const fn fits_small_object ( layout : Layout ) -> bool {
34
34
layout. size ( ) <= MAXIMUM_SMALL_WORDS * std:: mem:: size_of :: < usize > ( )
35
35
&& layout. align ( ) <= ARENA_ELEMENT_ALIGN
@@ -92,38 +92,11 @@ impl Drop for Chunk {
92
92
}
93
93
}
94
94
95
/// A slot in the free list.
///
/// Written into the memory of a freed object so the space can be
/// reused on a later allocation.
#[repr(C)]
pub struct FreeSlot {
    /// Pointer to the previous free slot (`None` terminates the list)
    pub(crate) prev_free: Option<NonNull<FreeSlot>>,
}
128
101
/// The number of small-object arenas (one per size class).
pub const NUM_SMALL_ARENAS: usize = 15;
/// Initial capacity of a freshly created chunk.
/// NOTE(review): unit (bytes vs words) is not visible in this chunk — confirm at use site.
const INITIAL_SIZE: usize = 512;
@@ -186,19 +159,19 @@ impl ArenaState {
186
159
self . current_chunk . store ( ptr) ;
187
160
}
188
161
#[ inline]
189
- fn alloc ( & self , element_size : usize ) -> NonNull < u8 > {
162
+ fn alloc ( & self , element_size : usize ) -> NonNull < UnknownHeader > {
190
163
unsafe {
191
164
let chunk = & * self . current_chunk ( ) . as_ptr ( ) ;
192
165
match chunk. try_alloc ( element_size) {
193
- Some ( header) => header,
166
+ Some ( header) => header. cast ( ) ,
194
167
None => self . alloc_fallback ( element_size)
195
168
}
196
169
}
197
170
}
198
171
199
172
#[ cold]
200
173
#[ inline( never) ]
201
- fn alloc_fallback ( & self , element_size : usize ) -> NonNull < GcHeader > {
174
+ fn alloc_fallback ( & self , element_size : usize ) -> NonNull < UnknownHeader > {
202
175
let mut chunks = self . lock_chunks ( ) ;
203
176
// Now that we hold the lock, check the current chunk again
204
177
unsafe {
@@ -214,7 +187,7 @@ impl ArenaState {
214
187
self . force_current_chunk ( NonNull :: from ( & * * chunks. last ( ) . unwrap ( ) ) ) ;
215
188
self . current_chunk ( ) . as_ref ( )
216
189
. try_alloc ( element_size) . unwrap ( )
217
- . cast :: < u8 > ( )
190
+ . cast :: < UnknownHeader > ( )
218
191
}
219
192
}
220
193
}
@@ -224,19 +197,32 @@ impl ArenaState {
224
197
/// This is a lock-free linked list
225
198
#[ derive( Default ) ]
226
199
pub ( crate ) struct FreeList {
227
- next : AtomicCell < Option < NonNull < MaybeFreeSlot > > >
200
+ next : AtomicCell < Option < NonNull < FreeSlot > > >
228
201
}
229
202
impl FreeList {
203
+ pub ( crate ) unsafe fn add_free ( & self , free : * mut UnknownHeader ) {
204
+ let new_slot = free as * mut FreeSlot ;
205
+ let mut next = self . next . load ( ) ;
206
+ loop {
207
+ ( * new_slot) . prev_free = next;
208
+ match self . next . compare_exchange ( next, Some ( NonNull :: new_unchecked ( new_slot) ) ) {
209
+ Ok ( _) => break ,
210
+ Err ( actual_next) => {
211
+ next = actual_next;
212
+ }
213
+ }
214
+ }
215
+ }
230
216
#[ inline]
231
- pub ( crate ) fn next_free ( & self ) -> Option < NonNull < MaybeFreeSlot > > {
217
+ pub ( crate ) fn next_free ( & self ) -> Option < NonNull < FreeSlot > > {
232
218
self . next . load ( )
233
219
}
234
220
#[ inline]
235
- pub ( crate ) unsafe fn set_next_free ( & self , next : Option < NonNull < MaybeFreeSlot > > ) {
221
+ pub ( crate ) unsafe fn set_next_free ( & self , next : Option < NonNull < FreeSlot > > ) {
236
222
self . next . store ( next)
237
223
}
238
224
#[ inline]
239
- fn take_free ( & self ) -> Option < NonNull < GcHeader > > {
225
+ fn take_free ( & self ) -> Option < NonNull < u8 > > {
240
226
loop {
241
227
let next_free = match self . next . load ( ) {
242
228
Some ( free) => free,
@@ -246,13 +232,9 @@ impl FreeList {
246
232
unsafe {
247
233
if self . next . compare_exchange (
248
234
Some ( next_free) ,
249
- next_free. as_ref ( ) . free . prev_free
235
+ next_free. as_ref ( ) . prev_free
250
236
) . is_err ( ) { continue /* retry */ }
251
- debug_assert_eq ! (
252
- next_free. as_ref( ) . free. marker,
253
- FREE_SLOT_MARKER
254
- ) ;
255
- return Some ( NonNull :: from ( & next_free. as_ref ( ) . header ) )
237
+ return Some ( next_free. cast ( ) )
256
238
}
257
239
}
258
240
}
@@ -261,9 +243,13 @@ impl FreeList {
261
243
pub struct SmallArena {
262
244
pub ( crate ) element_size : usize ,
263
245
state : ArenaState ,
264
- pub ( crate ) free : FreeList
246
+ free : FreeList ,
265
247
}
248
+
266
249
impl SmallArena {
250
+ pub ( crate ) unsafe fn add_free ( & self , obj : * mut UnknownHeader ) {
251
+ self . free . add_free ( obj)
252
+ }
267
253
#[ cold] // Initialization is the slow path
268
254
fn with_words ( num_words : usize ) -> SmallArena {
269
255
assert ! ( num_words >= MINIMUM_WORDS ) ;
@@ -276,27 +262,14 @@ impl SmallArena {
276
262
}
277
263
}
278
264
#[ inline]
279
- pub ( crate ) fn alloc ( & self ) -> NonNull < u8 > {
265
+ pub ( crate ) fn alloc ( & self ) -> NonNull < UnknownHeader > {
280
266
// Check the free list
281
267
if let Some ( free) = self . free . take_free ( ) {
282
- let res = free. as_ptr ( ) . sub ( match free. as_ref ( ) . type_info . layout {
283
- Layout :: new
284
- } )
268
+ free. cast ( )
285
269
} else {
286
270
self . state . alloc ( self . element_size )
287
271
}
288
272
}
289
- pub ( crate ) unsafe fn for_each < F : FnMut ( * mut MaybeFreeSlot ) > ( & self , mut func : F ) {
290
- let chunks = self . state . lock_chunks ( ) ;
291
- for chunk in & * chunks {
292
- let mut ptr = chunk. start ;
293
- let end = chunk. current ( ) ;
294
- while ptr < end {
295
- func ( ptr as * mut MaybeFreeSlot ) ;
296
- ptr = ptr. add ( self . element_size ) ;
297
- }
298
- }
299
- }
300
273
}
301
274
macro_rules! arena_match {
302
275
( $arenas: expr, $target: ident, max = $max: expr; $( $size: pat => $num_words: literal @ $idx: expr) ,* ) => {
@@ -351,7 +324,7 @@ impl SmallArenaList {
351
324
}
352
325
// Divide round up
353
326
let word_size = mem:: size_of :: < usize > ( ) ;
354
- let num_words = ( small_object_size ( layout) + ( word_size - 1 ) )
327
+ let num_words = ( layout. size ( ) + ( word_size - 1 ) )
355
328
/ word_size;
356
329
self . find_raw ( num_words)
357
330
}
0 commit comments