@@ -20,6 +20,7 @@ pub unsafe trait FlexSource {
     /// `min_size` must be a multiple of [`GRANULARITY`]. `min_size` must not
     /// be zero.
     #[inline]
+    #[default_method_body_is_const]
     unsafe fn alloc(&mut self, min_size: usize) -> Option<NonNull<[u8]>> {
         let _ = min_size;
         None
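
A note on the attribute above: `#[default_method_body_is_const]` comes from the unstable `const_trait_impl` work that this patch targets. It marks a provided method body as const-evaluable, so an `impl const FlexSource for …` can inherit defaults such as `alloc` unchanged. A minimal sketch of the mechanism, assuming a nightly toolchain of this patch's era (the trait and type below are illustrative, not part of this crate):

    #![feature(const_trait_impl)]

    trait Capacity {
        // The provided body may also be evaluated at compile time.
        #[default_method_body_is_const]
        fn capacity_hint(&self) -> usize {
            0
        }
    }

    struct Null;

    // `impl const` keeps the inherited default callable in const contexts.
    impl const Capacity for Null {}

    const HINT: usize = Null.capacity_hint(); // == 0, computed at compile time
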
@@ -34,6 +35,7 @@ pub unsafe trait FlexSource {
     /// `ptr` must be an existing allocation made by this
     /// allocator. `min_new_len` must be greater than or equal to `ptr.len()`.
     #[inline]
+    #[default_method_body_is_const]
     unsafe fn realloc_inplace_grow(
         &mut self,
         ptr: NonNull<[u8]>,
@@ -61,6 +63,7 @@ pub unsafe trait FlexSource {
     ///
     /// The returned value must be constant for a particular instance of `Self`.
     #[inline]
+    #[default_method_body_is_const]
     fn supports_dealloc(&self) -> bool {
         false
     }
@@ -73,6 +76,7 @@ pub unsafe trait FlexSource {
     ///
     /// The returned value must be constant for a particular instance of `Self`.
     #[inline]
+    #[default_method_body_is_const]
     fn supports_realloc_inplace_grow(&self) -> bool {
         false
     }
@@ -92,6 +96,7 @@ pub unsafe trait FlexSource {
     ///
     /// The returned value must be constant for a particular instance of `Self`.
    #[inline]
+    #[default_method_body_is_const]
     fn is_contiguous_growable(&self) -> bool {
         false
     }
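
For a sense of how these capability queries are overridden together, here is a hypothetical source that hands out one pre-reserved buffer. `OneShotSource` and its logic are illustrative only, and `nonnull_slice_len` is assumed to be this crate's slice-pointer length helper:

    /// Illustrative only: hands out a single pre-reserved,
    /// `GRANULARITY`-aligned buffer, at most once.
    struct OneShotSource {
        buffer: Option<NonNull<[u8]>>,
    }

    unsafe impl FlexSource for OneShotSource {
        unsafe fn alloc(&mut self, min_size: usize) -> Option<NonNull<[u8]>> {
            // Hand the buffer out at most once, and only if it is large enough.
            if let Some(b) = self.buffer {
                if nonnull_slice_len(b) >= min_size {
                    self.buffer = None;
                    return Some(b);
                }
            }
            None
        }

        // The buffer can never be returned or grown, so the `false` defaults
        // for `supports_dealloc`, `supports_realloc_inplace_grow`, and
        // `is_contiguous_growable` are kept; only the alignment guarantee is
        // strengthened.
        fn min_align(&self) -> usize {
            GRANULARITY
        }
    }
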
@@ -102,12 +107,17 @@ pub unsafe trait FlexSource {
     ///
     /// The returned value must be constant for a particular instance of `Self`.
     #[inline]
+    #[default_method_body_is_const]
     fn min_align(&self) -> usize {
         1
     }
 }
 
 trait FlexSourceExt: FlexSource {
+    fn use_growable_pool(&self) -> bool;
+}
+
+impl<T: ~const FlexSource> const FlexSourceExt for T {
     #[inline]
     fn use_growable_pool(&self) -> bool {
         // `growable_pool` is used for deallocation and pool growth.
@@ -117,8 +127,6 @@ trait FlexSourceExt: FlexSource {
     }
 }
 
-impl<T: FlexSource> FlexSourceExt for T {}
-
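
The removal above is the crux of the refactoring: a trait-level default body plus an empty blanket impl gives the `~const` requirement on `T` nowhere to live. Moving the body into a `const` blanket impl solves that. The pattern distilled, with illustrative trait names and the era-specific nightly syntax:

    #![feature(const_trait_impl)]

    trait Base {
        #[default_method_body_is_const]
        fn flag(&self) -> bool {
            false
        }
    }

    trait Ext: Base {
        fn helper(&self) -> bool;
    }

    // The helper's body lives in the blanket impl rather than as a trait
    // default, so the `~const Base` bound can be stated right where the body
    // calls `flag`.
    impl<T: ~const Base> const Ext for T {
        fn helper(&self) -> bool {
            self.flag()
        }
    }
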
 /// Wraps [`core::alloc::GlobalAlloc`] to implement the [`FlexSource`] trait.
 ///
 /// Since this type does not implement [`FlexSource::realloc_inplace_grow`],
@@ -179,8 +187,8 @@ unsafe impl<T: core::alloc::GlobalAlloc, const ALIGN: usize> FlexSource
 /// A wrapper of [`Tlsf`] that automatically acquires fresh memory pools from
 /// [`FlexSource`].
 #[derive(Debug)]
-pub struct FlexTlsf<Source: FlexSource, FLBitmap, SLBitmap, const FLLEN: usize, const SLLEN: usize>
-{
+#[must_use = "call `destroy` to drop it cleanly"]
+pub struct FlexTlsf<Source, FLBitmap, SLBitmap, const FLLEN: usize, const SLLEN: usize> {
     /// The most recently created memory pool.
     growable_pool: Option<Pool>,
     source: Source,
@@ -222,7 +230,7 @@ const _: () = if core::mem::size_of::<PoolFtr>() != GRANULARITY / 2 {
 impl PoolFtr {
     /// Get a pointer to `PoolFtr` for a given allocation.
     #[inline]
-    fn get_for_alloc(alloc: NonNull<[u8]>, alloc_align: usize) -> *mut Self {
+    const fn get_for_alloc(alloc: NonNull<[u8]>, alloc_align: usize) -> *mut Self {
         let alloc_end = nonnull_slice_end(alloc);
         let mut ptr = alloc_end.wrapping_sub(core::mem::size_of::<Self>());
         // If `alloc_end` is not well-aligned, we need to adjust the location
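
`get_for_alloc` can become a `const fn` because everything it does is integer arithmetic: the candidate footer address is rounded down to `alloc_align`. That rounding step in isolation, as a self-contained sketch (`align_down` is a hypothetical name, not a function in this crate):

    /// Round `addr` down to a multiple of `align` (a power of two).
    const fn align_down(addr: usize, align: usize) -> usize {
        assert!(align.is_power_of_two());
        addr & !(align - 1)
    }

    // Compile-time check: 0x1007 rounded down to 16 bytes is 0x1000.
    const _: () = assert!(align_down(0x1007, 16) == 0x1000);
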
@@ -236,12 +244,12 @@ impl PoolFtr {
 
 /// Initialization with a [`FlexSource`] provided by [`Default::default`]
 impl<
-        Source: FlexSource + Default,
+        Source: FlexSource + ~const Default,
         FLBitmap: BinInteger,
         SLBitmap: BinInteger,
         const FLLEN: usize,
         const SLLEN: usize,
-    > Default for FlexTlsf<Source, FLBitmap, SLBitmap, FLLEN, SLLEN>
+    > const Default for FlexTlsf<Source, FLBitmap, SLBitmap, FLLEN, SLLEN>
 {
     #[inline]
     fn default() -> Self {
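
The `~const Default` / `impl const Default` pair keeps `FlexTlsf::default()` callable at compile time whenever the source's own `Default` is. The consuming and providing sides in miniature (illustrative type, era-specific syntax):

    #![feature(const_trait_impl)]

    struct NullSource;

    // Provide a const-callable `Default`...
    impl const Default for NullSource {
        fn default() -> Self {
            NullSource
        }
    }

    // ...which a `~const Default` bound can then consume in const code.
    const fn make_default<S: ~const Default>() -> S {
        S::default()
    }
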
@@ -271,6 +279,8 @@ impl<
     };
 }
 
+// FIXME: `~const` bounds can't appear on any `impl`s but `impl const Trait for
+// Ty` (This is why the `~const` bounds are applied on each method.)
 impl<
         Source: FlexSource,
         FLBitmap: BinInteger,
@@ -281,7 +291,7 @@ impl<
 {
     /// Construct a new `FlexTlsf` object.
     #[inline]
-    pub fn new(source: Source) -> Self {
+    pub const fn new(source: Source) -> Self {
         Self {
             source,
             tlsf: Tlsf::INIT,
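
The payoff of a const constructor (note `Tlsf::INIT`, an associated constant standing in where a bounded const fn would be awkward) is building an allocator with no runtime setup at all. A hedged usage sketch; the bitmap types and index lengths are placeholders, and `NullSource` is the hypothetical type from the note above:

    // A fully constructed allocator in a `static` initializer:
    static mut POOL: FlexTlsf<NullSource, u16, u16, 12, 16> =
        FlexTlsf::new(NullSource);
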
@@ -291,7 +301,7 @@ impl<
 
     /// Borrow the contained `Source`.
     #[inline]
-    pub fn source_ref(&self) -> &Source {
+    pub const fn source_ref(&self) -> &Source {
         &self.source
     }
@@ -302,7 +312,7 @@ impl<
     /// The caller must not replace the `Source` with another one or modify
     /// any existing allocations in the `Source`.
     #[inline]
-    pub unsafe fn source_mut_unchecked(&mut self) -> &mut Source {
+    pub const unsafe fn source_mut_unchecked(&mut self) -> &mut Source {
         &mut self.source
     }
@@ -316,14 +326,21 @@ impl<
     /// This method will complete in constant time (assuming `Source`'s methods
     /// do so as well).
     #[cfg_attr(target_arch = "wasm32", inline(never))]
-    pub fn allocate(&mut self, layout: Layout) -> Option<NonNull<u8>> {
+    pub const fn allocate(&mut self, layout: Layout) -> Option<NonNull<u8>>
+    where
+        Source: ~const FlexSource,
+        FLBitmap: ~const BinInteger,
+        SLBitmap: ~const BinInteger,
+    {
         if let Some(x) = self.tlsf.allocate(layout) {
             return Some(x);
         }
 
-        self.increase_pool_to_contain_allocation(layout)?;
+        const_try!(self.increase_pool_to_contain_allocation(layout));
 
-        self.tlsf.allocate(layout).or_else(|| {
+        let result = self.tlsf.allocate(layout);
+
+        if result.is_none() {
             // Not a hard error, but it's still unexpected because
             // `increase_pool_to_contain_allocation` was supposed to make this
             // allocation possible
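
`const_try!` replaces the `?` operator, whose `Try`-trait desugaring is not const-callable on this toolchain. The macro is presumably defined along these lines (its definition is not part of this diff):

    macro_rules! const_try {
        ($expr:expr) => {
            match $expr {
                Some(value) => value,
                None => return None,
            }
        };
    }

In statement position, as with `increase_pool_to_contain_allocation` returning `Option<()>`, the matched value is just `()`, so the same macro serves both expression and statement uses.
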
@@ -332,31 +349,37 @@ impl<
                 "the allocation failed despite the effort by \
                  `increase_pool_to_contain_allocation`"
             );
-            None
-        })
+        }
+
+        result
     }
 
     /// Increase the size of the memory pool to guarantee the success of the
     /// given allocation. Returns `Some(())` on success.
     #[inline]
-    fn increase_pool_to_contain_allocation(&mut self, layout: Layout) -> Option<()> {
+    const fn increase_pool_to_contain_allocation(&mut self, layout: Layout) -> Option<()>
+    where
+        Source: ~const FlexSource,
+        FLBitmap: ~const BinInteger,
+        SLBitmap: ~const BinInteger,
+    {
         let use_growable_pool = self.source.use_growable_pool();
 
         // How many extra bytes do we need to get from the source for the
        // allocation to succeed?
-        let extra_bytes_well_aligned =
+        let extra_bytes_well_aligned = const_try!(
             Tlsf::<'static, FLBitmap, SLBitmap, FLLEN, SLLEN>::pool_size_to_contain_allocation(
                 layout,
-            )?;
+            )
+        );
 
         // The sentinel block + the block to store the allocation
         debug_assert!(extra_bytes_well_aligned >= GRANULARITY * 2);
 
-        if let Some(growable_pool) = self.growable_pool.filter(|_| use_growable_pool) {
+        if let (Some(growable_pool), true) = (self.growable_pool, use_growable_pool) {
             // Try to extend an existing memory pool first.
-            let new_pool_len_desired = growable_pool
-                .pool_len
-                .checked_add(extra_bytes_well_aligned)?;
+            let new_pool_len_desired =
+                const_try!(growable_pool.pool_len.checked_add(extra_bytes_well_aligned));
 
             // The following assertion should not trip because...
             // - `extra_bytes_well_aligned` returns a value that is at least
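
The `filter` rewrite a few lines up is this patch's recurring closure-elimination trick: `Option::filter`, `map`, `or_else`, and `unwrap_or_else` all take closures, which cannot be invoked in const fns, so each call collapses into a pattern match. The shape of the transformation, self-contained:

    const fn filter_if(value: Option<u32>, keep: bool) -> Option<u32> {
        // Equivalent of `value.filter(|_| keep)` without a closure: fold the
        // flag into the tuple pattern instead.
        if let (Some(v), true) = (value, keep) {
            Some(v)
        } else {
            None
        }
    }

    const _: () = assert!(filter_if(Some(7), false).is_none());
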
@@ -459,13 +482,13 @@ impl<
             //   ╰───┬───╯
             //   GRANULARITY
             //
-            extra_bytes_well_aligned.checked_add(GRANULARITY)?
+            const_try!(extra_bytes_well_aligned.checked_add(GRANULARITY))
         } else {
             extra_bytes_well_aligned
         };
 
         // Safety: `extra_bytes` is non-zero and aligned to `GRANULARITY` bytes
-        let alloc = unsafe { self.source.alloc(extra_bytes)? };
+        let alloc = const_try!(unsafe { self.source.alloc(extra_bytes) });
 
         let is_well_aligned = self.source.min_align() >= super::GRANULARITY;
@@ -477,21 +500,26 @@ impl<
             } else {
                 self.tlsf.insert_free_block_ptr(alloc)
             }
-        }
-        .unwrap_or_else(|| unsafe {
-            debug_assert!(false, "`pool_size_to_contain_allocation` is an impostor");
-            // Safety: It's unreachable
-            core::hint::unreachable_unchecked()
-        })
-        .get();
+        };
+        let pool_len = if let Some(pool_len) = pool_len {
+            pool_len.get()
+        } else {
+            unsafe {
+                debug_assert!(false, "`pool_size_to_contain_allocation` is an impostor");
+                // Safety: It's unreachable
+                core::hint::unreachable_unchecked()
+            }
+        };
 
         if self.source.supports_dealloc() {
             // Link the new memory pool's `PoolFtr::prev_alloc_end` to the
             // previous pool (`self.growable_pool`).
             let pool_ftr = PoolFtr::get_for_alloc(alloc, self.source.min_align());
-            let prev_alloc = self
-                .growable_pool
-                .map(|p| nonnull_slice_from_raw_parts(p.alloc_start, p.alloc_len));
+            let prev_alloc = if let Some(p) = self.growable_pool {
+                Some(nonnull_slice_from_raw_parts(p.alloc_start, p.alloc_len))
+            } else {
+                None
+            };
             // Safety: `(*pool_ftr).prev_alloc` is within a pool footer
             // we control
             unsafe { (*pool_ftr).prev_alloc = prev_alloc };
@@ -522,7 +550,12 @@ impl<
     /// ([`Layout::align`]) as `align`.
     ///
     #[cfg_attr(target_arch = "wasm32", inline(never))]
-    pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, align: usize) {
+    pub const unsafe fn deallocate(&mut self, ptr: NonNull<u8>, align: usize)
+    where
+        Source: ~const FlexSource,
+        FLBitmap: ~const BinInteger,
+        SLBitmap: ~const BinInteger,
+    {
         // Safety: Upheld by the caller
         self.tlsf.deallocate(ptr, align)
     }
@@ -541,7 +574,12 @@ impl<
     ///
     /// - `ptr` must denote a memory block previously allocated via `self`.
     ///
-    pub(crate) unsafe fn deallocate_unknown_align(&mut self, ptr: NonNull<u8>) {
+    pub(crate) const unsafe fn deallocate_unknown_align(&mut self, ptr: NonNull<u8>)
+    where
+        Source: ~const FlexSource,
+        FLBitmap: ~const BinInteger,
+        SLBitmap: ~const BinInteger,
+    {
         // Safety: Upheld by the caller
         self.tlsf.deallocate_unknown_align(ptr)
     }
@@ -562,11 +600,16 @@ impl<
     /// - The memory block must have been allocated with the same alignment
     ///   ([`Layout::align`]) as `new_layout`.
     ///
-    pub unsafe fn reallocate(
+    pub const unsafe fn reallocate(
         &mut self,
         ptr: NonNull<u8>,
         new_layout: Layout,
-    ) -> Option<NonNull<u8>> {
+    ) -> Option<NonNull<u8>>
+    where
+        Source: ~const FlexSource,
+        FLBitmap: ~const BinInteger,
+        SLBitmap: ~const BinInteger,
+    {
         // Do this early so that the compiler can de-duplicate the evaluation of
         // `size_of_allocation`, which is done here as well as in
         // `Tlsf::reallocate`.
@@ -584,7 +627,7 @@ impl<
         // the same as the one in `Tlsf::reallocate`, but `self.allocate`
         // here refers to `FlexTlsf::allocate`, which inserts new memory pools
         // as necessary.
-        let new_ptr = self.allocate(new_layout)?;
+        let new_ptr = const_try!(self.allocate(new_layout));
 
         // Move the existing data into the new location
         debug_assert!(new_layout.size() >= old_size);
@@ -605,24 +648,36 @@ impl<
     /// - `ptr` must denote a memory block previously allocated via `Self`.
     ///
     #[inline]
-    pub(crate) unsafe fn size_of_allocation_unknown_align(ptr: NonNull<u8>) -> usize {
+    pub(crate) const unsafe fn size_of_allocation_unknown_align(ptr: NonNull<u8>) -> usize {
         // Safety: Upheld by the caller
         Tlsf::<'static, FLBitmap, SLBitmap, FLLEN, SLLEN>::size_of_allocation_unknown_align(ptr)
     }
 }
 
-impl<Source: FlexSource, FLBitmap, SLBitmap, const FLLEN: usize, const SLLEN: usize> Drop
-    for FlexTlsf<Source, FLBitmap, SLBitmap, FLLEN, SLLEN>
+// FIXME: There isn't a way to add `~const` to a type definition, so this
+// `destroy` cannot be `Drop::drop`
+// FIXME: `~const` bounds can't appear on any `impl`s but
+// `impl const Trait for Ty`
+impl<Source: FlexSource, FLBitmap, SLBitmap, const FLLEN: usize, const SLLEN: usize>
+    FlexTlsf<Source, FLBitmap, SLBitmap, FLLEN, SLLEN>
 {
-    fn drop(&mut self) {
+    /// Deallocate all memory blocks and destroy `self`.
+    pub const fn destroy(mut self)
+    where
+        Source: ~const FlexSource + ~const Drop,
+        FLBitmap: ~const Drop,
+        SLBitmap: ~const Drop,
+    {
         if self.source.supports_dealloc() {
             debug_assert!(self.source.use_growable_pool());
 
             // Deallocate all memory pools
             let align = self.source.min_align();
-            let mut cur_alloc_or_none = self
-                .growable_pool
-                .map(|p| nonnull_slice_from_raw_parts(p.alloc_start, p.alloc_len));
+            let mut cur_alloc_or_none = if let Some(p) = self.growable_pool {
+                Some(nonnull_slice_from_raw_parts(p.alloc_start, p.alloc_len))
+            } else {
+                None
+            };
 
             while let Some(cur_alloc) = cur_alloc_or_none {
                 // Safety: We control the referenced pool footer
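
Because `Drop::drop` cannot yet be made `const`, teardown is the explicit `destroy`, and the `#[must_use]` attribute added earlier is what reminds callers to invoke it. A hedged lifecycle sketch in a const context, reusing the placeholder parameters from the notes above (with `NullSource` every allocation yields `None`; a source with real backing memory would behave differently):

    const _: () = {
        let mut tlsf: FlexTlsf<NullSource, u16, u16, 12, 16> =
            FlexTlsf::new(NullSource);
        if let Some(p) = tlsf.allocate(Layout::new::<u64>()) {
            // Safety: `p` was just allocated by `tlsf` with `u64` alignment.
            unsafe { tlsf.deallocate(p, core::mem::align_of::<u64>()) };
        }
        // Nothing runs implicitly on scope exit; the pools are reclaimed only
        // by this explicit call.
        tlsf.destroy();
    };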