Commit 12195b8

Support adding additional capacity to FreeLists (#10516)
* Support adding additional capacity to `FreeList`s. Split out from #10503 (see the sketch below).
* Add a test for `FreeList::add_capacity`.
* Also test when the old capacity is not a multiple of our alignment.
* Add comments about alignment-rounding and free list capacity.
* Don't run the little example as a doc test.
1 parent f01ebca commit 12195b8
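
The heart of the change is the new `FreeList::add_capacity` method in the diff below: the free list keeps its capacity unrounded, and on each growth it rounds the old and new capacities down to the allocator's alignment to work out which block, if any, to hand back to `dealloc`. The following standalone sketch is a simplified stand-in for that index/size arithmetic only; `ALIGN`, `round_down`, and `new_block` are illustrative names, and the 16-byte alignment is an assumption made for this example, not necessarily the crate's actual constant.

```rust
// Illustrative sketch only: mirrors the rounding arithmetic that the new
// `add_capacity` performs before calling `dealloc`. The names and the
// 16-byte alignment are assumptions made for this example.
const ALIGN: u32 = 16;

/// Round `n` down to a multiple of `align` (a power of two).
fn round_down(n: u32, align: u32) -> u32 {
    debug_assert!(align.is_power_of_two());
    n & !(align - 1)
}

/// Given the old and new (unrounded) capacities, return the `(index, size)`
/// of the block that growth makes available, or `None` if no whole aligned
/// block was gained.
fn new_block(old_cap: u32, new_cap: u32) -> Option<(u32, u32)> {
    // The new block starts where the aligned old capacity ended, but never
    // at index zero, since the free list only hands out non-zero indices.
    let index = round_down(old_cap, ALIGN).max(ALIGN);
    let end = round_down(new_cap, ALIGN);
    let size = end.checked_sub(index)?;
    if size == 0 { None } else { Some((index, size)) }
}

fn main() {
    // Growing 0 -> 16 gains nothing usable: the range 16..16 is empty.
    assert_eq!(new_block(0, 16), None);
    // Growing 16 -> 48 adds a 32-byte block starting at index 16.
    assert_eq!(new_block(16, 48), Some((16, 32)));
    // Unaligned growth only ever yields whole aligned blocks.
    assert_eq!(new_block(16, 47), Some((16, 16)));
}
```

In the real method, the resulting block is additionally checked against `u32` and `Layout` limits and then released through `dealloc`, which merges it with any adjacent free block.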

File tree

1 file changed (+154, -70 lines)


crates/wasmtime/src/runtime/vm/gc/enabled/free_list.rs

Lines changed: 154 additions & 70 deletions
@@ -1,11 +1,26 @@
 use crate::prelude::*;
 use alloc::collections::BTreeMap;
-use core::cmp;
 use core::{alloc::Layout, num::NonZeroU32, ops::Bound};
 
 /// A very simple first-fit free list for use by our garbage collectors.
 pub(crate) struct FreeList {
     /// The total capacity of the contiguous range of memory we are managing.
+    ///
+    /// NB: we keep `self.capacity` unrounded because otherwise we would get
+    /// rounding errors where we lose track of the actual capacity we have when
+    /// repeatedly adding capacity `n` where `n < ALIGN`:
+    ///
+    /// ```ignore
+    /// let mut free_list = FreeList::new(0);
+    /// loop {
+    ///     free_list.add_capacity(1);
+    /// }
+    /// ```
+    ///
+    /// If we eagerly rounded capacity down to our alignment on every call to
+    /// `add_capacity`, the free list would always think it has zero capacity,
+    /// even though it would have enough capacity for many allocations after
+    /// enough iterations of the loop.
     capacity: usize,
     /// Our free blocks, as a map from index to length of the free block at that
     /// index.
@@ -28,7 +43,7 @@ impl FreeList {
     /// Create a new `FreeList` for a contiguous region of memory of the given
     /// size.
     pub fn new(capacity: usize) -> Self {
-        log::trace!("FreeList::new({capacity})");
+        log::debug!("FreeList::new({capacity})");
         let mut free_list = FreeList {
             capacity,
             free_block_index_to_len: BTreeMap::new(),
@@ -37,8 +52,66 @@ impl FreeList {
         free_list
     }
 
+    /// Add additional capacity to this free list.
+    #[allow(dead_code)] // TODO: becomes used in https://github.com/bytecodealliance/wasmtime/pull/10503
+    pub fn add_capacity(&mut self, additional: usize) {
+        let old_cap = self.capacity;
+        self.capacity = self.capacity.saturating_add(additional);
+        log::debug!(
+            "FreeList::add_capacity({additional:#x}): capacity growing from {old_cap:#x} to {:#x}",
+            self.capacity
+        );
+
+        // See the comment on `self.capacity` about why we need to do the
+        // alignment-rounding here, rather than keeping `self.capacity` aligned
+        // at rest.
+        let old_cap_rounded = round_usize_down_to_pow2(old_cap, ALIGN_USIZE);
+
+        // If we are adding capacity beyond what a `u32` can address, then we
+        // can't actually use that capacity, so don't bother adding a new block
+        // to the free list.
+        let Ok(old_cap_rounded) = u32::try_from(old_cap_rounded) else {
+            return;
+        };
+
+        // Our new block's index is the end of the old capacity.
+        let index = NonZeroU32::new(old_cap_rounded).unwrap_or(
+            // But additionally all indices must be non-zero, so start the new
+            // block at the first aligned index if necessary.
+            NonZeroU32::new(ALIGN_U32).unwrap(),
+        );
+
+        // If, after rounding everything to our alignment, we aren't actually
+        // gaining any new capacity, then don't add a new block to the free
+        // list.
+        let new_cap = u32::try_from(self.capacity).unwrap_or(u32::MAX);
+        let new_cap = round_u32_down_to_pow2(new_cap, ALIGN_U32);
+        debug_assert!(new_cap >= index.get());
+        let size = new_cap - index.get();
+        debug_assert_eq!(size % ALIGN_U32, 0);
+        if size == 0 {
+            return;
+        }
+
+        // If we can't represent this block in a `Layout`, then don't add it to
+        // our free list either.
+        let Ok(layout) = Layout::from_size_align(usize::try_from(size).unwrap(), ALIGN_USIZE)
+        else {
+            return;
+        };
+
+        // Okay! Add a block to our free list for the new capacity, potentially
+        // merging it with existing blocks at the end of the free list.
+        log::trace!(
+            "FreeList::add_capacity(..): adding block {index:#x}..{:#x}",
+            index.get() + size
+        );
+        self.dealloc(index, layout);
+    }
+
+    #[cfg(test)]
     fn max_size(&self) -> usize {
-        let cap = cmp::min(self.capacity, usize::try_from(u32::MAX).unwrap());
+        let cap = core::cmp::min(self.capacity, usize::try_from(u32::MAX).unwrap());
         round_usize_down_to_pow2(cap.saturating_sub(ALIGN_USIZE), ALIGN_USIZE)
     }
 
@@ -47,21 +120,11 @@ impl FreeList {
     fn check_layout(&self, layout: Layout) -> Result<u32> {
         ensure!(
             layout.align() <= ALIGN_USIZE,
-            "requested allocation's alignment of {} is greater than max supported alignment of {ALIGN_USIZE}",
+            "requested allocation's alignment of {} is greater than max supported \
+             alignment of {ALIGN_USIZE}",
             layout.align(),
         );
 
-        if layout.size() > self.max_size() {
-            let trap = crate::Trap::AllocationTooLarge;
-            let err = anyhow::Error::from(trap);
-            let err = err.context(format!(
-                "requested allocation's size of {} is greater than the max supported size of {}",
-                layout.size(),
-                self.max_size(),
-            ));
-            return Err(err);
-        }
-
         let alloc_size = u32::try_from(layout.size()).map_err(|e| {
             let trap = crate::Trap::AllocationTooLarge;
             let err = anyhow::Error::from(trap);
@@ -377,6 +440,8 @@ mod tests {
     #[test]
     #[cfg_attr(miri, ignore)]
     fn check_no_fragmentation((capacity, ops) in ops()) {
+        let _ = env_logger::try_init();
+
         // Map from allocation id to ptr.
         let mut live = HashMap::new();
 
@@ -519,23 +584,14 @@ mod tests {
     fn allocate_no_split() {
         // Create a free list with the capacity to allocate two blocks of size
         // `ALIGN_U32`.
-        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 2);
+        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 2);
 
         assert_eq!(free_list.free_block_index_to_len.len(), 1);
-        assert_eq!(
-            free_list.max_size(),
-            usize::try_from(ALIGN_U32).unwrap() * 2
-        );
+        assert_eq!(free_list.max_size(), ALIGN_USIZE * 2);
 
         // Allocate a block such that the remainder is not worth splitting.
         free_list
-            .alloc(
-                Layout::from_size_align(
-                    usize::try_from(ALIGN_U32).unwrap() + ALIGN_USIZE,
-                    ALIGN_USIZE,
-                )
-                .unwrap(),
-            )
+            .alloc(Layout::from_size_align(ALIGN_USIZE + ALIGN_USIZE, ALIGN_USIZE).unwrap())
             .expect("allocation within 'static' free list limits")
             .expect("have free space available for allocation");
 
@@ -547,23 +603,14 @@ mod tests {
     fn allocate_and_split() {
         // Create a free list with the capacity to allocate three blocks of size
         // `ALIGN_U32`.
-        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 3);
+        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 3);
 
         assert_eq!(free_list.free_block_index_to_len.len(), 1);
-        assert_eq!(
-            free_list.max_size(),
-            usize::try_from(ALIGN_U32).unwrap() * 3
-        );
+        assert_eq!(free_list.max_size(), ALIGN_USIZE * 3);
 
         // Allocate a block such that the remainder is not worth splitting.
         free_list
-            .alloc(
-                Layout::from_size_align(
-                    usize::try_from(ALIGN_U32).unwrap() + ALIGN_USIZE,
-                    ALIGN_USIZE,
-                )
-                .unwrap(),
-            )
+            .alloc(Layout::from_size_align(ALIGN_USIZE + ALIGN_USIZE, ALIGN_USIZE).unwrap())
             .expect("allocation within 'static' free list limits")
             .expect("have free space available for allocation");
 
@@ -573,10 +620,9 @@ mod tests {
 
     #[test]
     fn dealloc_merge_prev_and_next() {
-        let layout =
-            Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
+        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();
 
-        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
+        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
         assert_eq!(
             free_list.free_block_index_to_len.len(),
             1,
@@ -621,10 +667,9 @@ mod tests {
 
     #[test]
     fn dealloc_merge_with_prev_and_not_next() {
-        let layout =
-            Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
+        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();
 
-        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
+        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
         assert_eq!(
             free_list.free_block_index_to_len.len(),
             1,
@@ -669,10 +714,9 @@ mod tests {
 
     #[test]
     fn dealloc_merge_with_next_and_not_prev() {
-        let layout =
-            Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
+        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();
 
-        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
+        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
         assert_eq!(
             free_list.free_block_index_to_len.len(),
             1,
@@ -717,10 +761,9 @@ mod tests {
 
     #[test]
     fn dealloc_no_merge() {
-        let layout =
-            Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
+        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();
 
-        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
+        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
         assert_eq!(
             free_list.free_block_index_to_len.len(),
             1,
@@ -770,38 +813,27 @@ mod tests {
     #[test]
     fn alloc_size_too_large() {
         // Free list with room for 10 min-sized blocks.
-        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 10);
-        assert_eq!(
-            free_list.max_size(),
-            usize::try_from(ALIGN_U32).unwrap() * 10
-        );
+        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 10);
+        assert_eq!(free_list.max_size(), ALIGN_USIZE * 10);
 
         // Attempt to allocate something that is 20 times the size of our
         // min-sized block.
         assert!(free_list
-            .alloc(
-                Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap() * 20, ALIGN_USIZE)
-                    .unwrap(),
-            )
-            .is_err());
+            .alloc(Layout::from_size_align(ALIGN_USIZE * 20, ALIGN_USIZE).unwrap())
+            .unwrap()
+            .is_none());
     }
 
     #[test]
     fn alloc_align_too_large() {
         // Free list with room for 10 min-sized blocks.
-        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 10);
-        assert_eq!(
-            free_list.max_size(),
-            usize::try_from(ALIGN_U32).unwrap() * 10
-        );
+        let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 10);
+        assert_eq!(free_list.max_size(), ALIGN_USIZE * 10);
 
         // Attempt to allocate something that requires larger alignment than
         // `FreeList` supports.
         assert!(free_list
-            .alloc(
-                Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE * 2)
-                    .unwrap(),
-            )
+            .alloc(Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE * 2).unwrap(),)
             .is_err());
     }
 
@@ -834,4 +866,56 @@ mod tests {
             test(&mut f, l);
         }
     }
+
+    #[test]
+    fn add_capacity() {
+        let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();
+
+        let mut free_list = FreeList::new(0);
+        assert!(free_list.alloc(layout).unwrap().is_none(), "no capacity");
+
+        free_list.add_capacity(ALIGN_USIZE);
+        assert!(
+            free_list.alloc(layout).unwrap().is_none(),
+            "still not enough capacity because we won't allocate the zero index"
+        );
+
+        free_list.add_capacity(1);
+        assert!(
+            free_list.alloc(layout).unwrap().is_none(),
+            "still not enough capacity because allocations are multiples of the alignment"
+        );
+
+        free_list.add_capacity(ALIGN_USIZE - 1);
+        let a = free_list
+            .alloc(layout)
+            .unwrap()
+            .expect("now we have enough capacity for one");
+        assert!(
+            free_list.alloc(layout).unwrap().is_none(),
+            "but not enough capacity for two"
+        );
+
+        free_list.add_capacity(ALIGN_USIZE);
+        let b = free_list
+            .alloc(layout)
+            .unwrap()
+            .expect("now we have enough capacity for two");
+
+        free_list.dealloc(a, layout);
+        free_list.dealloc(b, layout);
+        assert_eq!(
+            free_list.free_block_index_to_len.len(),
+            1,
+            "`dealloc` should merge blocks from different `add_capacity` calls together"
+        );
+
+        free_list.add_capacity(ALIGN_USIZE);
+        assert_eq!(
+            free_list.free_block_index_to_len.len(),
+            1,
+            "`add_capacity` should eagerly merge new capacity into the last block \
+             in the free list, when possible"
+        );
+    }
 }
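
The doc comment added to `capacity` above explains why the capacity is stored unrounded: rounding eagerly on every call would silently discard growth whenever the increment is smaller than the alignment. Below is a minimal standalone illustration of that bookkeeping point, assuming a hypothetical 16-byte alignment; `round_down` stands in for the crate's `round_usize_down_to_pow2`.

```rust
// Assumed 16-byte alignment for illustration; not necessarily the crate's value.
const ALIGN: usize = 16;

/// Round `n` down to a multiple of `align` (a power of two).
fn round_down(n: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    n & !(align - 1)
}

fn main() {
    // Keep the capacity unrounded, as the commit does: after 32 one-byte
    // additions there really is room for two aligned 16-byte blocks.
    let mut unrounded = 0usize;
    for _ in 0..32 {
        unrounded += 1; // like free_list.add_capacity(1)
    }
    assert_eq!(round_down(unrounded, ALIGN), 32);

    // Round eagerly on every addition instead, and each `0 + 1` collapses
    // back to 0: the free list would believe it never gained any capacity.
    let mut eagerly_rounded = 0usize;
    for _ in 0..32 {
        eagerly_rounded = round_down(eagerly_rounded + 1, ALIGN);
    }
    assert_eq!(eagerly_rounded, 0);
}
```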
