Skip to content

Commit 00a1780

Browse files
authored
Remove some unnecessary UnsafeCells in GC code (bytecodealliance#10461)
* Remove some unnecessary `UnsafeCell`s in GC code. None of these are actually mutated while behind shared references. Everything, including interactions with JIT code, obeys Rust's borrowing rules. Running tests under Miri still passes, verifying these claims. Therefore, we can remove these `UnsafeCell`s, which makes the code much easier to read and much less footgun-y, since there is less `unsafe` code and less working with `UnsafeCell`s' raw pointers.
* Use `SendSyncPtr` in DRC collector runtime code.
* Use `Mmap::slice[_mut]` methods.
1 parent 6a8d3d5 commit 00a1780

File tree

3 files changed

+46
-85
lines changed

3 files changed

+46
-85
lines changed

crates/wasmtime/src/runtime/vm/gc/enabled/drc.rs

Lines changed: 42 additions & 78 deletions
Original file line numberDiff line numberDiff line change
@@ -50,15 +50,14 @@ use crate::runtime::vm::{
5050
GcHeapObject, GcProgress, GcRootsIter, GcRuntime, Mmap, TypedGcRef, VMExternRef, VMGcHeader,
5151
VMGcRef,
5252
};
53+
use crate::vm::SendSyncPtr;
5354
use core::{
5455
alloc::Layout,
5556
any::Any,
56-
cell::UnsafeCell,
5757
mem,
5858
num::NonZeroUsize,
5959
ops::{Deref, DerefMut, Range},
60-
ptr::{self, NonNull},
61-
slice,
60+
ptr::NonNull,
6261
};
6362
use wasmtime_environ::drc::DrcTypeLayouts;
6463
use wasmtime_environ::{GcArrayLayout, GcStructLayout, GcTypeLayouts, VMGcKind, VMSharedTypeIndex};
@@ -168,17 +167,12 @@ impl DrcHeap {
168167
let drc_ref = drc_ref(gc_ref);
169168
let header = self.index_mut(&drc_ref);
170169
debug_assert_ne!(
171-
*header.ref_count.get_mut(),
172-
0,
170+
header.ref_count, 0,
173171
"{:#p} is supposedly live; should have nonzero ref count",
174172
*gc_ref
175173
);
176-
*header.ref_count.get_mut() += 1;
177-
log::trace!(
178-
"increment {:#p} ref count -> {}",
179-
*gc_ref,
180-
header.ref_count.get_mut()
181-
);
174+
header.ref_count += 1;
175+
log::trace!("increment {:#p} ref count -> {}", *gc_ref, header.ref_count);
182176
}
183177

184178
/// Decrement the ref count for the associated object.
@@ -193,18 +187,13 @@ impl DrcHeap {
193187
let drc_ref = drc_ref(gc_ref);
194188
let header = self.index_mut(drc_ref);
195189
debug_assert_ne!(
196-
*header.ref_count.get_mut(),
197-
0,
190+
header.ref_count, 0,
198191
"{:#p} is supposedly live; should have nonzero ref count",
199192
*gc_ref
200193
);
201-
*header.ref_count.get_mut() -= 1;
202-
log::trace!(
203-
"decrement {:#p} ref count -> {}",
204-
*gc_ref,
205-
header.ref_count.get_mut()
206-
);
207-
*header.ref_count.get_mut() == 0
194+
header.ref_count -= 1;
195+
log::trace!("decrement {:#p} ref count -> {}", *gc_ref, header.ref_count);
196+
header.ref_count == 0
208197
}
209198

210199
/// Decrement the ref count for the associated object.
@@ -269,15 +258,7 @@ impl DrcHeap {
269258
.chunks(mem::size_of::<u32>())
270259
.filter_map(|chunk| {
271260
assert_eq!(chunk.len(), 4);
272-
273-
let bytes = unsafe {
274-
[
275-
*chunk[0].get(),
276-
*chunk[1].get(),
277-
*chunk[2].get(),
278-
*chunk[3].get(),
279-
]
280-
};
261+
let bytes = [chunk[0], chunk[1], chunk[2], chunk[3]];
281262

282263
// Values are always little-endian inside the GC heap.
283264
let raw = u32::from_le_bytes(bytes);
@@ -329,7 +310,7 @@ impl DrcHeap {
329310
}
330311

331312
debug_assert_ne!(
332-
*self.index_mut(drc_ref(&gc_ref)).ref_count.get_mut(),
313+
self.index_mut(drc_ref(&gc_ref)).ref_count,
333314
0,
334315
"{gc_ref:#p} is on the Wasm stack and therefore should be held \
335316
by the activations table; should have nonzero ref count",
@@ -353,10 +334,7 @@ impl DrcHeap {
353334
.chunk
354335
.iter_mut()
355336
.take(num_filled)
356-
.map(|slot| {
357-
let raw = *slot.get_mut();
358-
VMGcRef::from_raw_u32(raw).expect("non-null")
359-
})
337+
.map(|slot| VMGcRef::from_raw_u32(*slot).expect("non-null"))
360338
}
361339

362340
#[inline(never)]
@@ -383,14 +361,14 @@ impl DrcHeap {
383361
// borrows.
384362
let mut alloc = mem::take(&mut self.activations_table.alloc);
385363
for slot in alloc.chunk.iter_mut().take(num_filled) {
386-
let raw = mem::take(slot.get_mut());
364+
let raw = mem::take(slot);
387365
let gc_ref = VMGcRef::from_raw_u32(raw).expect("non-null");
388366
f(self, gc_ref);
389-
*slot.get_mut() = 0;
367+
*slot = 0;
390368
}
391369

392370
debug_assert!(
393-
alloc.chunk.iter_mut().all(|slot| *slot.get_mut() == 0),
371+
alloc.chunk.iter().all(|slot| *slot == 0),
394372
"after sweeping the bump chunk, all slots should be empty",
395373
);
396374

@@ -491,16 +469,10 @@ fn externref_to_drc(externref: &VMExternRef) -> &TypedGcRef<VMDrcExternRef> {
491469
#[repr(C)]
492470
struct VMDrcHeader {
493471
header: VMGcHeader,
494-
ref_count: UnsafeCell<u64>,
472+
ref_count: u64,
495473
object_size: u32,
496474
}
497475

498-
// Although this contains an `UnsafeCell`, that is just for allowing the field
499-
// to be written to by JIT code, and it is only read/modified when we have
500-
// access to an appropriate borrow of the heap.
501-
unsafe impl Send for VMDrcHeader {}
502-
unsafe impl Sync for VMDrcHeader {}
503-
504476
unsafe impl GcHeapObject for VMDrcHeader {
505477
#[inline]
506478
fn is(_header: &VMGcHeader) -> bool {
@@ -644,7 +616,7 @@ unsafe impl GcHeap for DrcHeap {
644616

645617
*self.index_mut(drc_ref(&gc_ref)) = VMDrcHeader {
646618
header,
647-
ref_count: UnsafeCell::new(1),
619+
ref_count: 1,
648620
object_size: size,
649621
};
650622
log::trace!("new object: increment {gc_ref:#p} ref count -> 1");
@@ -799,16 +771,14 @@ unsafe impl GcHeap for DrcHeap {
799771
debug_assert!(dec_ref_stack.as_ref().is_some_and(|s| s.is_empty()));
800772
}
801773

802-
fn heap_slice(&self) -> &[UnsafeCell<u8>] {
803-
let ptr = self.heap.as_ptr().cast();
774+
fn heap_slice(&self) -> &[u8] {
804775
let len = self.heap.len();
805-
unsafe { slice::from_raw_parts(ptr, len) }
776+
unsafe { self.heap.slice(0..len) }
806777
}
807778

808779
fn heap_slice_mut(&mut self) -> &mut [u8] {
809-
let ptr = self.heap.as_mut_ptr();
810780
let len = self.heap.len();
811-
unsafe { slice::from_raw_parts_mut(ptr, len) }
781+
unsafe { self.heap.slice_mut(0..len) }
812782
}
813783
}
814784

@@ -850,7 +820,7 @@ impl<'a> GarbageCollection<'a> for DrcCollection<'a> {
850820
/// The type of `VMGcRefActivationsTable`'s bump region's elements.
851821
///
852822
/// These are written to by Wasm.
853-
type TableElem = UnsafeCell<u32>;
823+
type TableElem = u32;
854824

855825
/// A table that over-approximizes the set of `VMGcRef`s that any Wasm
856826
/// activation on this thread is currently using.
@@ -893,15 +863,14 @@ struct VMGcRefActivationsTable {
893863
struct VMGcRefTableAlloc {
894864
/// Bump-allocation finger within the `chunk`.
895865
///
896-
/// NB: this is an `UnsafeCell` because it is written to by compiled Wasm
897-
/// code.
898-
next: UnsafeCell<NonNull<TableElem>>,
866+
/// NB: this is written to by compiled Wasm code.
867+
next: SendSyncPtr<TableElem>,
899868

900869
/// Pointer to just after the `chunk`.
901870
///
902871
/// This is *not* within the current chunk and therefore is not a valid
903872
/// place to insert a reference!
904-
end: NonNull<TableElem>,
873+
end: SendSyncPtr<TableElem>,
905874

906875
/// Bump allocation chunk that stores fast-path insertions.
907876
///
@@ -920,8 +889,8 @@ impl Default for VMGcRefTableAlloc {
920889
let next = chunk.as_mut_ptr();
921890
let end = unsafe { next.add(chunk.len()) };
922891
VMGcRefTableAlloc {
923-
next: UnsafeCell::new(NonNull::new(next).unwrap()),
924-
end: NonNull::new(end).unwrap(),
892+
next: SendSyncPtr::new(NonNull::new(next).unwrap()),
893+
end: SendSyncPtr::new(NonNull::new(end).unwrap()),
925894
chunk,
926895
}
927896
}
@@ -934,24 +903,20 @@ impl VMGcRefTableAlloc {
934903
/// Force the lazy allocation of this bump region.
935904
fn force_allocation(&mut self) {
936905
assert!(self.chunk.is_empty());
937-
self.chunk = (0..Self::CHUNK_SIZE).map(|_| UnsafeCell::new(0)).collect();
906+
self.chunk = (0..Self::CHUNK_SIZE).map(|_| 0).collect();
938907
self.reset();
939908
}
940909

941910
/// Reset this bump region, retaining any underlying allocation, but moving
942911
/// the bump pointer and limit to their default positions.
943912
fn reset(&mut self) {
944-
self.next = UnsafeCell::new(NonNull::new(self.chunk.as_mut_ptr()).unwrap());
945-
self.end = NonNull::new(unsafe { self.chunk.as_mut_ptr().add(self.chunk.len()) }).unwrap();
913+
self.next = SendSyncPtr::new(NonNull::new(self.chunk.as_mut_ptr()).unwrap());
914+
self.end = SendSyncPtr::new(
915+
NonNull::new(unsafe { self.chunk.as_mut_ptr().add(self.chunk.len()) }).unwrap(),
916+
);
946917
}
947918
}
948919

949-
// This gets around the usage of `UnsafeCell` throughout the internals of this
950-
// allocator, but the storage should all be Send/Sync and synchronization isn't
951-
// necessary since operations require `&mut self`.
952-
unsafe impl Send for VMGcRefTableAlloc {}
953-
unsafe impl Sync for VMGcRefTableAlloc {}
954-
955920
fn _assert_send_sync() {
956921
fn _assert<T: Send + Sync>() {}
957922
_assert::<VMGcRefActivationsTable>();
@@ -990,8 +955,8 @@ impl VMGcRefActivationsTable {
990955
#[inline]
991956
fn bump_capacity_remaining(&self) -> usize {
992957
let end = self.alloc.end.as_ptr() as usize;
993-
let next = unsafe { *self.alloc.next.get() };
994-
end - next.as_ptr() as usize
958+
let next = self.alloc.next.as_ptr() as usize;
959+
end - next
995960
}
996961

997962
/// Try and insert a `VMGcRef` into this table.
@@ -1006,21 +971,20 @@ impl VMGcRefActivationsTable {
1006971
#[inline]
1007972
fn try_insert(&mut self, gc_ref: VMGcRef) -> Result<(), VMGcRef> {
1008973
unsafe {
1009-
let next = *self.alloc.next.get();
1010-
if next == self.alloc.end {
974+
if self.alloc.next == self.alloc.end {
1011975
return Err(gc_ref);
1012976
}
1013977

1014978
debug_assert_eq!(
1015-
(*next.as_ref().get()),
979+
self.alloc.next.as_non_null().read(),
1016980
0,
1017981
"slots >= the `next` bump finger are always `None`"
1018982
);
1019-
ptr::write(next.as_ptr(), UnsafeCell::new(gc_ref.as_raw_u32()));
983+
self.alloc.next.as_non_null().write(gc_ref.as_raw_u32());
1020984

1021-
let next = NonNull::new_unchecked(next.as_ptr().add(1));
1022-
debug_assert!(next <= self.alloc.end);
1023-
*self.alloc.next.get() = next;
985+
let next = SendSyncPtr::new(NonNull::new(self.alloc.next.as_ptr().add(1)).unwrap());
986+
debug_assert!(next.as_ptr() <= self.alloc.end.as_ptr());
987+
self.alloc.next = next;
1024988

1025989
Ok(())
1026990
}
@@ -1040,7 +1004,7 @@ impl VMGcRefActivationsTable {
10401004
}
10411005

10421006
fn num_filled_in_bump_chunk(&self) -> usize {
1043-
let next = unsafe { *self.alloc.next.get() };
1007+
let next = self.alloc.next;
10441008
let bytes_unused = (self.alloc.end.as_ptr() as usize) - (next.as_ptr() as usize);
10451009
let slots_unused = bytes_unused / mem::size_of::<TableElem>();
10461010
self.alloc.chunk.len().saturating_sub(slots_unused)
@@ -1055,7 +1019,7 @@ impl VMGcRefActivationsTable {
10551019
// filled-in slots.
10561020
let num_filled = self.num_filled_in_bump_chunk();
10571021
for slot in self.alloc.chunk.iter().take(num_filled) {
1058-
if let Some(elem) = VMGcRef::from_raw_u32(unsafe { *slot.get() }) {
1022+
if let Some(elem) = VMGcRef::from_raw_u32(*slot) {
10591023
f(&elem);
10601024
}
10611025
}
@@ -1124,7 +1088,7 @@ mod tests {
11241088
fn ref_count_is_at_correct_offset() {
11251089
let extern_data = VMDrcHeader {
11261090
header: VMGcHeader::externref(),
1127-
ref_count: UnsafeCell::new(0),
1091+
ref_count: 0,
11281092
object_size: 0,
11291093
};
11301094

crates/wasmtime/src/runtime/vm/gc/enabled/null.rs

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@ use core::ptr::NonNull;
1818
use core::{
1919
alloc::Layout,
2020
any::Any,
21-
cell::UnsafeCell,
2221
num::{NonZeroU32, NonZeroUsize},
2322
};
2423
use wasmtime_environ::{
@@ -202,8 +201,8 @@ unsafe impl GcHeap for NullHeap {
202201
self.no_gc_count -= 1;
203202
}
204203

205-
fn heap_slice(&self) -> &[UnsafeCell<u8>] {
206-
let ptr = self.heap.as_ptr().cast();
204+
fn heap_slice(&self) -> &[u8] {
205+
let ptr = self.heap.as_ptr();
207206
let len = self.heap.len();
208207
unsafe { core::slice::from_raw_parts(ptr, len) }
209208
}

crates/wasmtime/src/runtime/vm/gc/gc_runtime.rs

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,7 @@ use crate::runtime::vm::{
66
VMExternRef, VMGcHeader, VMGcObjectDataMut, VMGcRef, VMStructRef,
77
};
88
use core::ptr::NonNull;
9-
use core::{
10-
alloc::Layout, any::Any, cell::UnsafeCell, marker, mem, num::NonZeroUsize, ops::Range, ptr,
11-
};
9+
use core::{alloc::Layout, any::Any, marker, mem, num::NonZeroUsize, ops::Range, ptr};
1210
use wasmtime_environ::{GcArrayLayout, GcStructLayout, GcTypeLayouts, VMSharedTypeIndex};
1311

1412
/// Trait for integrating a garbage collector with the runtime.
@@ -387,7 +385,7 @@ pub unsafe trait GcHeap: 'static + Send + Sync {
387385
/// The heap slice must be the GC heap region, and the region must remain
388386
/// valid (i.e. not moved or resized) for JIT code until `self` is dropped
389387
/// or `self.reset()` is called.
390-
fn heap_slice(&self) -> &[UnsafeCell<u8>];
388+
fn heap_slice(&self) -> &[u8];
391389

392390
/// Get a mutable slice of the raw bytes of the GC heap.
393391
///

0 commit comments

Comments
 (0)