Skip to content

Commit 9c4a98e

Browse files
committed
Wasm GC: Unify supported alignment and minimum block size in FreeList
Additionally, support alignment 16 for `v128`s inside GC objects.

We previously allowed the alignment and minimum block size to be different values, *and* did not round allocation sizes up to the minimum block size. This could lead to bugs like the following sequence of events:

* free list initially has a block 0x10..MAX
* alloc(size=16) -> 0x10
* free list now has one block: 0x20..MAX
* alloc(size=16) -> 0x20
* free list now has one block: 0x30..MAX
* dealloc(0x10, size=16)
* this merges 0x10..0x20 into 0x30..MAX because the gap between 0x20 and 0x30 is smaller than the minimum block size
* dealloc(0x20, size=16)
* triggers overlapping blocks assertion failure!

If we ensure that the minimum allocation size is a multiple of our supported alignment and we clamp requested allocations' sizes to at least the minimum block size, then we could avoid this. It is simpler, however, to unify our supported alignment and our minimum block size into the same value. This PR does that. No more need for multiple concepts which interact in subtle ways.
1 parent 132a490 commit 9c4a98e

File tree

2 files changed

+109
-39
lines changed

2 files changed

+109
-39
lines changed

crates/wasmtime/src/runtime/vm/gc/enabled/free_list.rs

Lines changed: 97 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -13,13 +13,11 @@ pub(crate) struct FreeList {
1313
}
1414

1515
/// Our minimum and maximum supported alignment. Every allocation is aligned to
16-
/// this.
17-
const ALIGN_U32: u32 = 8;
16+
/// this. Additionally, this is the minimum allocation size, and every
17+
/// allocation is rounded up to this size.
18+
const ALIGN_U32: u32 = 16;
1819
const ALIGN_USIZE: usize = ALIGN_U32 as usize;
1920

20-
/// Our minimum allocation size.
21-
const MIN_BLOCK_SIZE: u32 = 24;
22-
2321
impl FreeList {
2422
/// Create a new `Layout` from the given `size` with an alignment that is
2523
/// compatible with this free list.
@@ -30,6 +28,7 @@ impl FreeList {
3028
/// Create a new `FreeList` for a contiguous region of memory of the given
3129
/// size.
3230
pub fn new(capacity: usize) -> Self {
31+
log::trace!("FreeList::new({capacity})");
3332
let mut free_list = FreeList {
3433
capacity,
3534
free_block_index_to_len: BTreeMap::new(),
@@ -99,7 +98,7 @@ impl FreeList {
9998
debug_assert_eq!(block_index % ALIGN_U32, 0);
10099
debug_assert_eq!(block_len % ALIGN_U32, 0);
101100

102-
if block_len - alloc_size < MIN_BLOCK_SIZE {
101+
if block_len - alloc_size < ALIGN_U32 {
103102
// The block is not large enough to split.
104103
return block_len;
105104
}
@@ -130,6 +129,7 @@ impl FreeList {
130129
///
131130
/// * `Err(_)`:
132131
pub fn alloc(&mut self, layout: Layout) -> Result<Option<NonZeroU32>> {
132+
log::trace!("FreeList::alloc({layout:?})");
133133
let alloc_size = self.check_layout(layout)?;
134134
debug_assert_eq!(alloc_size % ALIGN_U32, 0);
135135

@@ -150,11 +150,14 @@ impl FreeList {
150150
#[cfg(debug_assertions)]
151151
self.check_integrity();
152152

153+
log::trace!("FreeList::alloc({layout:?}) -> {block_index:#x}");
153154
Ok(Some(unsafe { NonZeroU32::new_unchecked(block_index) }))
154155
}
155156

156157
/// Deallocate an object with the given layout.
157158
pub fn dealloc(&mut self, index: NonZeroU32, layout: Layout) {
159+
log::trace!("FreeList::dealloc({index:#x}, {layout:?})");
160+
158161
let index = index.get();
159162
debug_assert_eq!(index % ALIGN_U32, 0);
160163

@@ -182,6 +185,12 @@ impl FreeList {
182185
if blocks_are_contiguous(prev_index, prev_len, index)
183186
&& blocks_are_contiguous(index, alloc_size, next_index) =>
184187
{
188+
log::trace!(
189+
"merging blocks {prev_index:#x}..{prev_len:#x}, {index:#x}..{index_end:#x}, {next_index:#x}..{next_end:#x}",
190+
prev_len = prev_index + prev_len,
191+
index_end = index + u32::try_from(layout.size()).unwrap(),
192+
next_end = next_index + next_len,
193+
);
185194
self.free_block_index_to_len.remove(&next_index);
186195
let merged_block_len = next_index + next_len - prev_index;
187196
debug_assert_eq!(merged_block_len % ALIGN_U32, 0);
@@ -192,6 +201,11 @@ impl FreeList {
192201
(Some((prev_index, prev_len)), _)
193202
if blocks_are_contiguous(prev_index, prev_len, index) =>
194203
{
204+
log::trace!(
205+
"merging blocks {prev_index:#x}..{prev_len:#x}, {index:#x}..{index_end:#x}",
206+
prev_len = prev_index + prev_len,
207+
index_end = index + u32::try_from(layout.size()).unwrap(),
208+
);
195209
let merged_block_len = index + alloc_size - prev_index;
196210
debug_assert_eq!(merged_block_len % ALIGN_U32, 0);
197211
*self.free_block_index_to_len.get_mut(&prev_index).unwrap() = merged_block_len;
@@ -201,6 +215,11 @@ impl FreeList {
201215
(_, Some((next_index, next_len)))
202216
if blocks_are_contiguous(index, alloc_size, next_index) =>
203217
{
218+
log::trace!(
219+
"merging blocks {index:#x}..{index_end:#x}, {next_index:#x}..{next_end:#x}",
220+
index_end = index + u32::try_from(layout.size()).unwrap(),
221+
next_end = next_index + next_len,
222+
);
204223
self.free_block_index_to_len.remove(&next_index);
205224
let merged_block_len = next_index + next_len - index;
206225
debug_assert_eq!(merged_block_len % ALIGN_U32, 0);
@@ -210,6 +229,7 @@ impl FreeList {
210229
// None of the blocks are contiguous: insert this block into the
211230
// free list.
212231
(_, _) => {
232+
log::trace!("cannot merge blocks");
213233
self.free_block_index_to_len.insert(index, alloc_size);
214234
}
215235
}
@@ -271,7 +291,7 @@ impl FreeList {
271291

272292
let len = round_u32_down_to_pow2(end.saturating_sub(start), ALIGN_U32);
273293

274-
let entire_range = if len >= MIN_BLOCK_SIZE {
294+
let entire_range = if len >= ALIGN_U32 {
275295
Some((start, len))
276296
} else {
277297
None
@@ -290,9 +310,25 @@ fn blocks_are_contiguous(prev_index: u32, prev_len: u32, next_index: u32) -> boo
290310
// the size of the `Layout` given to us upon deallocation (aka `prev_len`)
291311
// is smaller than the actual size of the block we allocated.
292312
let end_of_prev = prev_index + prev_len;
293-
debug_assert!(next_index >= end_of_prev);
313+
log::trace!(
314+
"checking for overlapping blocks: \n\
315+
\t prev_index = {prev_index:#x}\n\
316+
\t prev_len = {prev_len:#x}\n\
317+
\tend_of_prev = {end_of_prev:#x}\n\
318+
\t next_index = {next_index:#x}\n\
319+
`next_index` should be >= `end_of_prev`"
320+
);
321+
debug_assert!(
322+
next_index >= end_of_prev,
323+
"overlapping blocks: \n\
324+
\t prev_index = {prev_index:#x}\n\
325+
\t prev_len = {prev_len:#x}\n\
326+
\tend_of_prev = {end_of_prev:#x}\n\
327+
\t next_index = {next_index:#x}\n\
328+
`next_index` should be >= `end_of_prev`"
329+
);
294330
let delta_to_next = next_index - end_of_prev;
295-
delta_to_next < MIN_BLOCK_SIZE
331+
delta_to_next < ALIGN_U32
296332
}
297333

298334
#[inline]
@@ -479,21 +515,20 @@ mod tests {
479515
#[test]
480516
fn allocate_no_split() {
481517
// Create a free list with the capacity to allocate two blocks of size
482-
// `MIN_BLOCK_SIZE`.
483-
let mut free_list =
484-
FreeList::new(ALIGN_USIZE + usize::try_from(MIN_BLOCK_SIZE).unwrap() * 2);
518+
// `ALIGN_U32`.
519+
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 2);
485520

486521
assert_eq!(free_list.free_block_index_to_len.len(), 1);
487522
assert_eq!(
488523
free_list.max_size(),
489-
usize::try_from(MIN_BLOCK_SIZE).unwrap() * 2
524+
usize::try_from(ALIGN_U32).unwrap() * 2
490525
);
491526

492527
// Allocate a block such that the remainder is not worth splitting.
493528
free_list
494529
.alloc(
495530
Layout::from_size_align(
496-
usize::try_from(MIN_BLOCK_SIZE).unwrap() + ALIGN_USIZE,
531+
usize::try_from(ALIGN_U32).unwrap() + ALIGN_USIZE,
497532
ALIGN_USIZE,
498533
)
499534
.unwrap(),
@@ -508,21 +543,20 @@ mod tests {
508543
#[test]
509544
fn allocate_and_split() {
510545
// Create a free list with the capacity to allocate three blocks of size
511-
// `MIN_BLOCK_SIZE`.
512-
let mut free_list =
513-
FreeList::new(ALIGN_USIZE + usize::try_from(MIN_BLOCK_SIZE).unwrap() * 3);
546+
// `ALIGN_U32`.
547+
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 3);
514548

515549
assert_eq!(free_list.free_block_index_to_len.len(), 1);
516550
assert_eq!(
517551
free_list.max_size(),
518-
usize::try_from(MIN_BLOCK_SIZE).unwrap() * 3
552+
usize::try_from(ALIGN_U32).unwrap() * 3
519553
);
520554

521555
// Allocate a block such that the remainder is not worth splitting.
522556
free_list
523557
.alloc(
524558
Layout::from_size_align(
525-
usize::try_from(MIN_BLOCK_SIZE).unwrap() + ALIGN_USIZE,
559+
usize::try_from(ALIGN_U32).unwrap() + ALIGN_USIZE,
526560
ALIGN_USIZE,
527561
)
528562
.unwrap(),
@@ -537,10 +571,9 @@ mod tests {
537571
#[test]
538572
fn dealloc_merge_prev_and_next() {
539573
let layout =
540-
Layout::from_size_align(usize::try_from(MIN_BLOCK_SIZE).unwrap(), ALIGN_USIZE).unwrap();
574+
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
541575

542-
let mut free_list =
543-
FreeList::new(ALIGN_USIZE + usize::try_from(MIN_BLOCK_SIZE).unwrap() * 100);
576+
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
544577
assert_eq!(
545578
free_list.free_block_index_to_len.len(),
546579
1,
@@ -586,10 +619,9 @@ mod tests {
586619
#[test]
587620
fn dealloc_merge_with_prev_and_not_next() {
588621
let layout =
589-
Layout::from_size_align(usize::try_from(MIN_BLOCK_SIZE).unwrap(), ALIGN_USIZE).unwrap();
622+
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
590623

591-
let mut free_list =
592-
FreeList::new(ALIGN_USIZE + usize::try_from(MIN_BLOCK_SIZE).unwrap() * 100);
624+
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
593625
assert_eq!(
594626
free_list.free_block_index_to_len.len(),
595627
1,
@@ -635,10 +667,9 @@ mod tests {
635667
#[test]
636668
fn dealloc_merge_with_next_and_not_prev() {
637669
let layout =
638-
Layout::from_size_align(usize::try_from(MIN_BLOCK_SIZE).unwrap(), ALIGN_USIZE).unwrap();
670+
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
639671

640-
let mut free_list =
641-
FreeList::new(ALIGN_USIZE + usize::try_from(MIN_BLOCK_SIZE).unwrap() * 100);
672+
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
642673
assert_eq!(
643674
free_list.free_block_index_to_len.len(),
644675
1,
@@ -684,10 +715,9 @@ mod tests {
684715
#[test]
685716
fn dealloc_no_merge() {
686717
let layout =
687-
Layout::from_size_align(usize::try_from(MIN_BLOCK_SIZE).unwrap(), ALIGN_USIZE).unwrap();
718+
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();
688719

689-
let mut free_list =
690-
FreeList::new(ALIGN_USIZE + usize::try_from(MIN_BLOCK_SIZE).unwrap() * 100);
720+
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
691721
assert_eq!(
692722
free_list.free_block_index_to_len.len(),
693723
1,
@@ -737,18 +767,17 @@ mod tests {
737767
#[test]
738768
fn alloc_size_too_large() {
739769
// Free list with room for 10 min-sized blocks.
740-
let mut free_list =
741-
FreeList::new(ALIGN_USIZE + usize::try_from(MIN_BLOCK_SIZE).unwrap() * 10);
770+
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 10);
742771
assert_eq!(
743772
free_list.max_size(),
744-
usize::try_from(MIN_BLOCK_SIZE).unwrap() * 10
773+
usize::try_from(ALIGN_U32).unwrap() * 10
745774
);
746775

747776
// Attempt to allocate something that is 20 times the size of our
748777
// min-sized block.
749778
assert!(free_list
750779
.alloc(
751-
Layout::from_size_align(usize::try_from(MIN_BLOCK_SIZE).unwrap() * 20, ALIGN_USIZE)
780+
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap() * 20, ALIGN_USIZE)
752781
.unwrap(),
753782
)
754783
.is_err());
@@ -757,20 +786,49 @@ mod tests {
757786
#[test]
758787
fn alloc_align_too_large() {
759788
// Free list with room for 10 min-sized blocks.
760-
let mut free_list =
761-
FreeList::new(ALIGN_USIZE + usize::try_from(MIN_BLOCK_SIZE).unwrap() * 10);
789+
let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 10);
762790
assert_eq!(
763791
free_list.max_size(),
764-
usize::try_from(MIN_BLOCK_SIZE).unwrap() * 10
792+
usize::try_from(ALIGN_U32).unwrap() * 10
765793
);
766794

767795
// Attempt to allocate something that requires larger alignment than
768796
// `FreeList` supports.
769797
assert!(free_list
770798
.alloc(
771-
Layout::from_size_align(usize::try_from(MIN_BLOCK_SIZE).unwrap(), ALIGN_USIZE * 2)
799+
Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE * 2)
772800
.unwrap(),
773801
)
774802
.is_err());
775803
}
804+
805+
#[test]
806+
fn all_pairwise_alloc_dealloc_orderings() {
807+
let tests: &[fn(&mut FreeList, Layout)] = &[
808+
|f, l| {
809+
let a = f.alloc(l).unwrap().unwrap();
810+
let b = f.alloc(l).unwrap().unwrap();
811+
f.dealloc(a, l);
812+
f.dealloc(b, l);
813+
},
814+
|f, l| {
815+
let a = f.alloc(l).unwrap().unwrap();
816+
let b = f.alloc(l).unwrap().unwrap();
817+
f.dealloc(b, l);
818+
f.dealloc(a, l);
819+
},
820+
|f, l| {
821+
let a = f.alloc(l).unwrap().unwrap();
822+
f.dealloc(a, l);
823+
let b = f.alloc(l).unwrap().unwrap();
824+
f.dealloc(b, l);
825+
},
826+
];
827+
828+
let l = Layout::from_size_align(16, 8).unwrap();
829+
for test in tests {
830+
let mut f = FreeList::new(0x100);
831+
test(&mut f, l);
832+
}
833+
}
776834
}
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
;;! gc = true
2+
;;! simd = true
3+
4+
(module
5+
(type $s (struct (field v128)))
6+
(func (export "alloc")
7+
struct.new_default $s
8+
drop
9+
)
10+
)
11+
12+
(assert_return (invoke "alloc"))

0 commit comments

Comments
 (0)