Skip to content

Commit 04e7e10

Browse files
committed
Support adding additional capacity to FreeLists
Split out from bytecodealliance#10503
1 parent a4700cb commit 04e7e10

File tree

1 file changed: +82 additions, −70 deletions

crates/wasmtime/src/runtime/vm/gc/enabled/free_list.rs

Lines changed: 82 additions & 70 deletions
Original file line number · Diff line number · Diff line change
@@ -1,6 +1,5 @@
11
use crate::prelude::*;
22
use alloc::collections::BTreeMap;
3-
use core::cmp;
43
use core::{alloc::Layout, num::NonZeroU32, ops::Bound};
54

65
/// A very simple first-fit free list for use by our garbage collectors.
@@ -28,7 +27,7 @@ impl FreeList {
2827
/// Create a new `FreeList` for a contiguous region of memory of the given
2928
/// size.
3029
pub fn new(capacity: usize) -> Self {
31-
log::trace!("FreeList::new({capacity})");
30+
log::debug!("FreeList::new({capacity})");
3231
let mut free_list = FreeList {
3332
capacity,
3433
free_block_index_to_len: BTreeMap::new(),
@@ -37,8 +36,62 @@ impl FreeList {
3736
free_list
3837
}
3938

39+
/// Add additional capacity to this free list.
40+
#[allow(dead_code)] // TODO: becomes used in https://github.com/bytecodealliance/wasmtime/pull/10503
41+
pub fn add_capacity(&mut self, additional: usize) {
42+
let old_cap = self.capacity;
43+
self.capacity = self.capacity.saturating_add(additional);
44+
log::debug!(
45+
"FreeList::add_capacity({additional:#x}): capacity growing from {old_cap:#x} to {:#x}",
46+
self.capacity
47+
);
48+
49+
// If we are adding capacity beyond what a `u32` can address, then we
50+
// can't actually use that capacity, so don't bother adding a new block
51+
// to the free list.
52+
let old_cap_rounded = round_usize_down_to_pow2(old_cap, ALIGN_USIZE);
53+
let Ok(old_cap_rounded) = u32::try_from(old_cap_rounded) else {
54+
return;
55+
};
56+
57+
// Our new block's index is the end of the old capacity.
58+
let index = NonZeroU32::new(old_cap_rounded).unwrap_or(
59+
// But additionally all indices must be non-zero, so start the new
60+
// block at the first aligned index if necessary.
61+
NonZeroU32::new(ALIGN_U32).unwrap(),
62+
);
63+
64+
// If, after rounding everything to our alignment, we aren't actually
65+
// gaining any new capacity, then don't add a new block to the free
66+
// list.
67+
let new_cap = u32::try_from(self.capacity).unwrap_or(u32::MAX);
68+
let new_cap = round_u32_down_to_pow2(new_cap, ALIGN_U32);
69+
debug_assert!(new_cap >= index.get());
70+
let size = new_cap - index.get();
71+
debug_assert_eq!(size % ALIGN_U32, 0);
72+
if size == 0 {
73+
return;
74+
}
75+
76+
// If we can't represent this block in a `Layout`, then don't add it to
77+
// our free list either.
78+
let Ok(layout) = Layout::from_size_align(usize::try_from(size).unwrap(), ALIGN_USIZE)
79+
else {
80+
return;
81+
};
82+
83+
// Okay! Add a block to our free list for the new capacity, potentially
84+
// merging it with existing blocks at the end of the free list.
85+
log::trace!(
86+
"FreeList::add_capacity(..): adding block {index:#x}..{:#x}",
87+
index.get() + size
88+
);
89+
self.dealloc(index, layout);
90+
}
91+
92+
#[cfg(test)]
4093
fn max_size(&self) -> usize {
41-
let cap = cmp::min(self.capacity, usize::try_from(u32::MAX).unwrap());
94+
let cap = core::cmp::min(self.capacity, usize::try_from(u32::MAX).unwrap());
4295
round_usize_down_to_pow2(cap.saturating_sub(ALIGN_USIZE), ALIGN_USIZE)
4396
}
4497

@@ -47,21 +100,11 @@ impl FreeList {
47100
fn check_layout(&self, layout: Layout) -> Result<u32> {
48101
ensure!(
49102
layout.align() <= ALIGN_USIZE,
50-
"requested allocation's alignment of {} is greater than max supported alignment of {ALIGN_USIZE}",
103+
"requested allocation's alignment of {} is greater than max supported \
104+
alignment of {ALIGN_USIZE}",
51105
layout.align(),
52106
);
53107

54-
if layout.size() > self.max_size() {
55-
let trap = crate::Trap::AllocationTooLarge;
56-
let err = anyhow::Error::from(trap);
57-
let err = err.context(format!(
58-
"requested allocation's size of {} is greater than the max supported size of {}",
59-
layout.size(),
60-
self.max_size(),
61-
));
62-
return Err(err);
63-
}
64-
65108
let alloc_size = u32::try_from(layout.size()).map_err(|e| {
66109
let trap = crate::Trap::AllocationTooLarge;
67110
let err = anyhow::Error::from(trap);
@@ -377,6 +420,8 @@ mod tests {
377420
#[test]
378421
#[cfg_attr(miri, ignore)]
379422
fn check_no_fragmentation((capacity, ops) in ops()) {
423+
let _ = env_logger::try_init();
424+
380425
// Map from allocation id to ptr.
381426
let mut live = HashMap::new();
382427

@@ -519,23 +564,14 @@ mod tests {
519564
fn allocate_no_split() {
520565
// Create a free list with the capacity to allocate two blocks of size
521566
// `ALIGN_U32`.
522-
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 2);
567+
let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 2);
523568

524569
assert_eq!(free_list.free_block_index_to_len.len(), 1);
525-
assert_eq!(
526-
free_list.max_size(),
527-
usize::try_from(ALIGN_U32).unwrap() * 2
528-
);
570+
assert_eq!(free_list.max_size(), ALIGN_USIZE * 2);
529571

530572
// Allocate a block such that the remainder is not worth splitting.
531573
free_list
532-
.alloc(
533-
Layout::from_size_align(
534-
usize::try_from(ALIGN_U32).unwrap() + ALIGN_USIZE,
535-
ALIGN_USIZE,
536-
)
537-
.unwrap(),
538-
)
574+
.alloc(Layout::from_size_align(ALIGN_USIZE + ALIGN_USIZE, ALIGN_USIZE).unwrap())
539575
.expect("allocation within 'static' free list limits")
540576
.expect("have free space available for allocation");
541577

@@ -547,23 +583,14 @@ mod tests {
547583
fn allocate_and_split() {
548584
// Create a free list with the capacity to allocate three blocks of size
549585
// `ALIGN_U32`.
550-
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 3);
586+
let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 3);
551587

552588
assert_eq!(free_list.free_block_index_to_len.len(), 1);
553-
assert_eq!(
554-
free_list.max_size(),
555-
usize::try_from(ALIGN_U32).unwrap() * 3
556-
);
589+
assert_eq!(free_list.max_size(), ALIGN_USIZE * 3);
557590

558591
// Allocate a block such that the remainder is not worth splitting.
559592
free_list
560-
.alloc(
561-
Layout::from_size_align(
562-
usize::try_from(ALIGN_U32).unwrap() + ALIGN_USIZE,
563-
ALIGN_USIZE,
564-
)
565-
.unwrap(),
566-
)
593+
.alloc(Layout::from_size_align(ALIGN_USIZE + ALIGN_USIZE, ALIGN_USIZE).unwrap())
567594
.expect("allocation within 'static' free list limits")
568595
.expect("have free space available for allocation");
569596

@@ -573,10 +600,9 @@ mod tests {
573600

574601
#[test]
575602
fn dealloc_merge_prev_and_next() {
576-
let layout =
577-
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
603+
let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();
578604

579-
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
605+
let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
580606
assert_eq!(
581607
free_list.free_block_index_to_len.len(),
582608
1,
@@ -621,10 +647,9 @@ mod tests {
621647

622648
#[test]
623649
fn dealloc_merge_with_prev_and_not_next() {
624-
let layout =
625-
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
650+
let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();
626651

627-
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
652+
let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
628653
assert_eq!(
629654
free_list.free_block_index_to_len.len(),
630655
1,
@@ -669,10 +694,9 @@ mod tests {
669694

670695
#[test]
671696
fn dealloc_merge_with_next_and_not_prev() {
672-
let layout =
673-
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
697+
let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();
674698

675-
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
699+
let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
676700
assert_eq!(
677701
free_list.free_block_index_to_len.len(),
678702
1,
@@ -717,10 +741,9 @@ mod tests {
717741

718742
#[test]
719743
fn dealloc_no_merge() {
720-
let layout =
721-
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
744+
let layout = Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE).unwrap();
722745

723-
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
746+
let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 100);
724747
assert_eq!(
725748
free_list.free_block_index_to_len.len(),
726749
1,
@@ -770,38 +793,27 @@ mod tests {
770793
#[test]
771794
fn alloc_size_too_large() {
772795
// Free list with room for 10 min-sized blocks.
773-
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 10);
774-
assert_eq!(
775-
free_list.max_size(),
776-
usize::try_from(ALIGN_U32).unwrap() * 10
777-
);
796+
let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 10);
797+
assert_eq!(free_list.max_size(), ALIGN_USIZE * 10);
778798

779799
// Attempt to allocate something that is 20 times the size of our
780800
// min-sized block.
781801
assert!(free_list
782-
.alloc(
783-
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap() * 20, ALIGN_USIZE)
784-
.unwrap(),
785-
)
786-
.is_err());
802+
.alloc(Layout::from_size_align(ALIGN_USIZE * 20, ALIGN_USIZE).unwrap())
803+
.unwrap()
804+
.is_none());
787805
}
788806

789807
#[test]
790808
fn alloc_align_too_large() {
791809
// Free list with room for 10 min-sized blocks.
792-
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 10);
793-
assert_eq!(
794-
free_list.max_size(),
795-
usize::try_from(ALIGN_U32).unwrap() * 10
796-
);
810+
let mut free_list = FreeList::new(ALIGN_USIZE + ALIGN_USIZE * 10);
811+
assert_eq!(free_list.max_size(), ALIGN_USIZE * 10);
797812

798813
// Attempt to allocate something that requires larger alignment than
799814
// `FreeList` supports.
800815
assert!(free_list
801-
.alloc(
802-
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE * 2)
803-
.unwrap(),
804-
)
816+
.alloc(Layout::from_size_align(ALIGN_USIZE, ALIGN_USIZE * 2).unwrap(),)
805817
.is_err());
806818
}
807819

0 commit comments

Comments (0)