
Commit c6dddea

Minimize lazy allocation of the GC store (#11411)
* Minimize lazy allocation of the GC store

  This commit is an effort to minimize the number of entrypoints which might lazily allocate a GC store. This is currently done through `StoreOpaque::gc_store_mut`, but that method is very commonly used, meaning there are many places to audit for lazily allocating a GC store. The reason this needs an audit is that lazy allocation is currently an async operation that must run on a fiber, which is something I'm looking to fix as part of #11262. This commit performs a few refactorings to achieve this:

  * `gc_store_mut` is renamed to `ensure_gc_store`. This is intended to become an `async` function in the future and clearly demarcates where lazy allocation of a GC store occurs.
  * `require_gc_store{,_mut}` is added as a pure accessor of the GC store with no lazy allocation. Most locations previously using `gc_store_mut` are updated to use this instead.

  Documentation is added to store methods to clearly indicate which ones allocate and which ones should only be called in a context where allocation has already happened.

* Fix configured build
* Relax GC store restrictions in more places
* Review comments on documentation
* Move `ensure_gc_store` calls during instantiation

  Instead, update `needs_gc_heap` with the tables that are added to a module and rely on instantiation to create the GC heap.

* Shuffle around some code
* Fix CI and review comments
* Add in a few more i31 cases for externref
1 parent 8b68f6f commit c6dddea
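To make the `require_gc_store{,_mut}` vs. `ensure_gc_store` split in the diffs below easier to follow, here is a minimal sketch of the intended shape of the two accessors. The struct layout, signatures, and error message are simplified assumptions for illustration, not Wasmtime's actual definitions, and the real `ensure_gc_store` is slated to become `async`.

```rust
use anyhow::{bail, Result};

/// Stand-in for Wasmtime's GC store; the real type owns the GC heap,
/// free lists, and so on.
struct GcStore {}

/// Stand-in for the relevant part of `StoreOpaque`.
struct StoreOpaque {
    gc_store: Option<GcStore>,
}

impl StoreOpaque {
    /// Pure accessor: never allocates. Callers must already be in a
    /// context where the GC store is known to exist (for example, after
    /// instantiating a module whose `needs_gc_heap` flag forced the heap
    /// to be created).
    fn require_gc_store_mut(&mut self) -> Result<&mut GcStore> {
        match &mut self.gc_store {
            Some(store) => Ok(store),
            None => bail!("GC heap has not been initialized for this store"),
        }
    }

    /// The one entrypoint that may lazily allocate. Keeping call sites of
    /// this method rare is the point of the commit, since it is expected
    /// to become `async`.
    fn ensure_gc_store(&mut self) -> Result<&mut GcStore> {
        if self.gc_store.is_none() {
            // Today this may need to run on a fiber; see #11262.
            self.gc_store = Some(GcStore {});
        }
        Ok(self.gc_store.as_mut().unwrap())
    }
}
```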

File tree

23 files changed: +336 -191 lines

crates/environ/src/compile/module_environ.rs

Lines changed: 1 addition & 0 deletions
@@ -360,6 +360,7 @@ impl<'a, 'data> ModuleEnvironment<'a, 'data> {
         for entry in tables {
             let wasmparser::Table { ty, init } = entry?;
             let table = self.convert_table_type(&ty)?;
+            self.result.module.needs_gc_heap |= table.ref_type.is_vmgcref_type();
             self.result.module.tables.push(table);
             let init = match init {
                 wasmparser::TableInit::RefNull => TableInitialValue::Null {

crates/wasmtime/src/runtime/externals/global.rs

Lines changed: 5 additions & 5 deletions
@@ -158,7 +158,7 @@ impl Global {
             HeapType::NoFunc => Ref::Func(None),

             HeapType::Extern => Ref::Extern(definition.as_gc_ref().map(|r| {
-                let r = store.unwrap_gc_store_mut().clone_gc_ref(r);
+                let r = store.clone_gc_ref(r);
                 ExternRef::from_cloned_gc_ref(&mut store, r)
             })),

@@ -180,7 +180,7 @@
             | HeapType::ConcreteExn(_) => definition
                 .as_gc_ref()
                 .map(|r| {
-                    let r = store.unwrap_gc_store_mut().clone_gc_ref(r);
+                    let r = store.clone_gc_ref(r);
                     AnyRef::from_cloned_gc_ref(&mut store, r)
                 })
                 .into(),
@@ -236,23 +236,23 @@
                     Some(e) => Some(e.try_gc_ref(&store)?.unchecked_copy()),
                 };
                 let new = new.as_ref();
-                definition.write_gc_ref(store.unwrap_gc_store_mut(), new);
+                definition.write_gc_ref(&mut store, new);
             }
             Val::AnyRef(a) => {
                 let new = match a {
                     None => None,
                     Some(a) => Some(a.try_gc_ref(&store)?.unchecked_copy()),
                 };
                 let new = new.as_ref();
-                definition.write_gc_ref(store.unwrap_gc_store_mut(), new);
+                definition.write_gc_ref(&mut store, new);
             }
             Val::ExnRef(e) => {
                 let new = match e {
                     None => None,
                     Some(e) => Some(e.try_gc_ref(&store)?.unchecked_copy()),
                 };
                 let new = new.as_ref();
-                definition.write_gc_ref(store.unwrap_gc_store_mut(), new);
+                definition.write_gc_ref(&mut store, new);
             }
         }
     }

crates/wasmtime/src/runtime/gc/enabled/anyref.rs

Lines changed: 18 additions & 10 deletions
@@ -280,11 +280,7 @@
     // (Not actually memory unsafe since we have indexed GC heaps.)
     pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<Self>> {
         let gc_ref = VMGcRef::from_raw_u32(raw)?;
-        let gc_ref = if gc_ref.is_i31() {
-            gc_ref.copy_i31()
-        } else {
-            store.unwrap_gc_store_mut().clone_gc_ref(&gc_ref)
-        };
+        let gc_ref = store.clone_gc_ref(&gc_ref);
         Some(Self::from_cloned_gc_ref(store, gc_ref))
     }

@@ -340,7 +336,7 @@
         let raw = if gc_ref.is_i31() {
             gc_ref.as_raw_non_zero_u32()
         } else {
-            store.gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
+            store.require_gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
         };
         Ok(raw.get())
     }
@@ -364,7 +360,7 @@
             return Ok(HeapType::I31);
         }

-        let header = store.gc_store()?.header(gc_ref);
+        let header = store.require_gc_store()?.header(gc_ref);

         if header.kind().matches(VMGcKind::ExternRef) {
             return Ok(HeapType::Any);
@@ -437,7 +433,11 @@
     pub(crate) fn _is_eqref(&self, store: &StoreOpaque) -> Result<bool> {
         assert!(self.comes_from_same_store(store));
         let gc_ref = self.inner.try_gc_ref(store)?;
-        Ok(gc_ref.is_i31() || store.gc_store()?.kind(gc_ref).matches(VMGcKind::EqRef))
+        Ok(gc_ref.is_i31()
+            || store
+                .require_gc_store()?
+                .kind(gc_ref)
+                .matches(VMGcKind::EqRef))
     }

     /// Downcast this `anyref` to an `eqref`.
@@ -558,7 +558,11 @@

     pub(crate) fn _is_struct(&self, store: &StoreOpaque) -> Result<bool> {
         let gc_ref = self.inner.try_gc_ref(store)?;
-        Ok(!gc_ref.is_i31() && store.gc_store()?.kind(gc_ref).matches(VMGcKind::StructRef))
+        Ok(!gc_ref.is_i31()
+            && store
+                .require_gc_store()?
+                .kind(gc_ref)
+                .matches(VMGcKind::StructRef))
     }

     /// Downcast this `anyref` to a `structref`.
@@ -622,7 +626,11 @@

     pub(crate) fn _is_array(&self, store: &StoreOpaque) -> Result<bool> {
         let gc_ref = self.inner.try_gc_ref(store)?;
-        Ok(!gc_ref.is_i31() && store.gc_store()?.kind(gc_ref).matches(VMGcKind::ArrayRef))
+        Ok(!gc_ref.is_i31()
+            && store
+                .require_gc_store()?
+                .kind(gc_ref)
+                .matches(VMGcKind::ArrayRef))
     }

     /// Downcast this `anyref` to an `arrayref`.

crates/wasmtime/src/runtime/gc/enabled/arrayref.rs

Lines changed: 6 additions & 6 deletions
@@ -411,7 +411,7 @@
         // Allocate the array and write each field value into the appropriate
         // offset.
         let arrayref = store
-            .gc_store_mut()?
+            .require_gc_store_mut()?
             .alloc_uninit_array(allocator.type_index(), len, allocator.layout())
             .context("unrecoverable error when allocating new `arrayref`")?
             .map_err(|n| GcHeapOutOfMemory::new((), n))?;
@@ -432,7 +432,7 @@
         })() {
             Ok(()) => Ok(Rooted::new(&mut store, arrayref.into())),
             Err(e) => {
-                store.gc_store_mut()?.dealloc_uninit_array(arrayref);
+                store.require_gc_store_mut()?.dealloc_uninit_array(arrayref);
                 Err(e)
             }
         }
@@ -633,7 +633,7 @@
         assert!(self.comes_from_same_store(store));
         let gc_ref = self.inner.try_gc_ref(store)?;
         debug_assert!({
-            let header = store.gc_store()?.header(gc_ref);
+            let header = store.require_gc_store()?.header(gc_ref);
             header.kind().matches(VMGcKind::ArrayRef)
         });
         let arrayref = gc_ref.as_arrayref_unchecked();
@@ -667,7 +667,7 @@
         let store = AutoAssertNoGc::new(store);

         let gc_ref = self.inner.try_gc_ref(&store)?;
-        let header = store.gc_store()?.header(gc_ref);
+        let header = store.require_gc_store()?.header(gc_ref);
         debug_assert!(header.kind().matches(VMGcKind::ArrayRef));

         let len = self._len(&store)?;
@@ -720,7 +720,7 @@
     fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
         assert!(self.comes_from_same_store(&store));
         let gc_ref = self.inner.try_gc_ref(store)?;
-        Ok(store.gc_store()?.header(gc_ref))
+        Ok(store.require_gc_store()?.header(gc_ref))
     }

     fn arrayref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMArrayRef> {
@@ -843,7 +843,7 @@

     pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
         let gc_ref = self.inner.try_gc_ref(store)?;
-        let header = store.gc_store()?.header(gc_ref);
+        let header = store.require_gc_store()?.header(gc_ref);
         debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
         Ok(header.ty().expect("arrayrefs should have concrete types"))
     }

crates/wasmtime/src/runtime/gc/enabled/eqref.rs

Lines changed: 11 additions & 3 deletions
@@ -198,7 +198,7 @@ impl EqRef {
             return Ok(HeapType::I31);
         }

-        let header = store.gc_store()?.header(gc_ref);
+        let header = store.require_gc_store()?.header(gc_ref);

         if header.kind().matches(VMGcKind::StructRef) {
             return Ok(HeapType::ConcreteStruct(
@@ -320,7 +320,11 @@

     pub(crate) fn _is_struct(&self, store: &StoreOpaque) -> Result<bool> {
         let gc_ref = self.inner.try_gc_ref(store)?;
-        Ok(!gc_ref.is_i31() && store.gc_store()?.kind(gc_ref).matches(VMGcKind::StructRef))
+        Ok(!gc_ref.is_i31()
+            && store
+                .require_gc_store()?
+                .kind(gc_ref)
+                .matches(VMGcKind::StructRef))
     }

     /// Downcast this `eqref` to a `structref`.
@@ -384,7 +388,11 @@

     pub(crate) fn _is_array(&self, store: &StoreOpaque) -> Result<bool> {
         let gc_ref = self.inner.try_gc_ref(store)?;
-        Ok(!gc_ref.is_i31() && store.gc_store()?.kind(gc_ref).matches(VMGcKind::ArrayRef))
+        Ok(!gc_ref.is_i31()
+            && store
+                .require_gc_store()?
+                .kind(gc_ref)
+                .matches(VMGcKind::ArrayRef))
     }

     /// Downcast this `eqref` to an `arrayref`.

crates/wasmtime/src/runtime/gc/enabled/exnref.rs

Lines changed: 7 additions & 7 deletions
@@ -166,7 +166,7 @@ impl ExnRef {
     // (Not actually memory unsafe since we have indexed GC heaps.)
     pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<Self>> {
         let gc_ref = VMGcRef::from_raw_u32(raw)?;
-        let gc_ref = store.unwrap_gc_store_mut().clone_gc_ref(&gc_ref);
+        let gc_ref = store.clone_gc_ref(&gc_ref);
         Some(Self::from_cloned_gc_ref(store, gc_ref))
     }

@@ -335,7 +335,7 @@
         // Allocate the exn and write each field value into the appropriate
         // offset.
         let exnref = store
-            .gc_store_mut()?
+            .require_gc_store_mut()?
             .alloc_uninit_exn(allocator.type_index(), &allocator.layout())
             .context("unrecoverable error when allocating new `exnref`")?
             .map_err(|n| GcHeapOutOfMemory::new((), n))?;
@@ -361,15 +361,15 @@
         })() {
             Ok(()) => Ok(Rooted::new(&mut store, exnref.into())),
             Err(e) => {
-                store.gc_store_mut()?.dealloc_uninit_exn(exnref);
+                store.require_gc_store_mut()?.dealloc_uninit_exn(exnref);
                 Err(e)
             }
         }
     }

     pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
         let gc_ref = self.inner.try_gc_ref(store)?;
-        let header = store.gc_store()?.header(gc_ref);
+        let header = store.require_gc_store()?.header(gc_ref);
         debug_assert!(header.kind().matches(VMGcKind::ExnRef));
         Ok(header.ty().expect("exnrefs should have concrete types"))
     }
@@ -421,7 +421,7 @@
         let raw = if gc_ref.is_i31() {
             gc_ref.as_raw_non_zero_u32()
         } else {
-            store.gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
+            store.require_gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
         };
         Ok(raw.get())
     }
@@ -501,7 +501,7 @@
         let store = AutoAssertNoGc::new(store);

         let gc_ref = self.inner.try_gc_ref(&store)?;
-        let header = store.gc_store()?.header(gc_ref);
+        let header = store.require_gc_store()?.header(gc_ref);
         debug_assert!(header.kind().matches(VMGcKind::ExnRef));

         let index = header.ty().expect("exnrefs should have concrete types");
@@ -554,7 +554,7 @@
     fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
         assert!(self.comes_from_same_store(&store));
         let gc_ref = self.inner.try_gc_ref(store)?;
-        Ok(store.gc_store()?.header(gc_ref))
+        Ok(store.require_gc_store()?.header(gc_ref))
     }

     fn exnref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMExnRef> {

crates/wasmtime/src/runtime/gc/enabled/externref.rs

Lines changed: 18 additions & 10 deletions
@@ -229,7 +229,7 @@ impl ExternRef {
         let gc_ref = store
             .retry_after_gc(value, |store, value| {
                 store
-                    .gc_store_mut()?
+                    .require_gc_store_mut()?
                     .alloc_externref(value)
                     .context("unrecoverable error when allocating new `externref`")?
                     .map_err(|(x, n)| GcHeapOutOfMemory::new(x, n).into())
@@ -346,7 +346,7 @@
         let gc_ref = store
             .retry_after_gc_async(value, |store, value| {
                 store
-                    .gc_store_mut()?
+                    .require_gc_store_mut()?
                     .alloc_externref(value)
                     .context("unrecoverable error when allocating new `externref`")?
                     .map_err(|(x, n)| GcHeapOutOfMemory::new(x, n).into())
@@ -435,11 +435,13 @@
         store: &mut AutoAssertNoGc<'_>,
         gc_ref: VMGcRef,
     ) -> Rooted<Self> {
-        assert!(
-            gc_ref.is_extern_ref(&*store.unwrap_gc_store().gc_heap)
-                || gc_ref.is_any_ref(&*store.unwrap_gc_store().gc_heap),
-            "GC reference {gc_ref:#p} should be an externref or anyref"
-        );
+        if !gc_ref.is_i31() {
+            assert!(
+                gc_ref.is_extern_ref(&*store.unwrap_gc_store().gc_heap)
+                    || gc_ref.is_any_ref(&*store.unwrap_gc_store().gc_heap),
+                "GC reference {gc_ref:#p} should be an externref or anyref"
+            );
+        }
         Rooted::new(store, gc_ref)
     }

@@ -481,7 +483,10 @@
     {
         let store = store.into().0;
         let gc_ref = self.inner.try_gc_ref(&store)?;
-        let gc_store = store.gc_store()?;
+        if gc_ref.is_i31() {
+            return Ok(None);
+        }
+        let gc_store = store.require_gc_store()?;
         if let Some(externref) = gc_ref.as_externref(&*gc_store.gc_heap) {
             Ok(Some(gc_store.externref_host_data(externref)))
         } else {
@@ -532,7 +537,10 @@
         // so that we can get the store's GC store. But importantly we cannot
         // trigger a GC while we are working with `gc_ref` here.
         let gc_ref = self.inner.try_gc_ref(store)?.unchecked_copy();
-        let gc_store = store.gc_store_mut()?;
+        if gc_ref.is_i31() {
+            return Ok(None);
+        }
+        let gc_store = store.require_gc_store_mut()?;
         if let Some(externref) = gc_ref.as_externref(&*gc_store.gc_heap) {
             Ok(Some(gc_store.externref_host_data_mut(externref)))
         } else {
@@ -579,7 +587,7 @@
     // (Not actually memory unsafe since we have indexed GC heaps.)
     pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<ExternRef>> {
         let gc_ref = VMGcRef::from_raw_u32(raw)?;
-        let gc_ref = store.unwrap_gc_store_mut().clone_gc_ref(&gc_ref);
+        let gc_ref = store.clone_gc_ref(&gc_ref);
         Some(Self::from_cloned_gc_ref(store, gc_ref))
     }