Skip to content

Commit c234f58

Browse files
committed
Improve lifetime safety and add more GC branding (done by AI)
1 parent e03994e commit c234f58

File tree

14 files changed

+250
-175
lines changed

14 files changed

+250
-175
lines changed

crates/dotnet-value/src/layout.rs

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,7 @@ impl LayoutManager {
116116
LayoutManager::Scalar(Scalar::ManagedPtr) => {
117117
// ManagedPtr in memory is 16 bytes: (Owner ObjectRef, Offset).
118118
// We need to trace the owner at offset 0.
119-
let ptr_size = size_of::<usize>();
119+
let ptr_size = ObjectRef::SIZE;
120120
unsafe { ObjectRef::read_unchecked(&storage[0..ptr_size]) }.trace(cc);
121121
}
122122
LayoutManager::Field(f) => {
@@ -135,18 +135,19 @@ impl LayoutManager {
135135
pub fn resurrect<'gc>(
136136
&self,
137137
storage: &[u8],
138-
fc: &gc_arena::Finalization<'gc>,
138+
fc: &'gc gc_arena::Finalization<'gc>,
139139
visited: &mut HashSet<usize>,
140140
) {
141141
match self {
142142
LayoutManager::Scalar(Scalar::ObjectRef) => {
143-
unsafe { ObjectRef::read_unchecked(storage) }.resurrect(fc, visited);
143+
unsafe { ObjectRef::read_branded(storage, fc) }.resurrect(fc, visited);
144144
}
145145
LayoutManager::Scalar(Scalar::ManagedPtr) => {
146146
// ManagedPtr in memory is 16 bytes: (Owner ObjectRef, Offset).
147147
// We need to resurrect the owner at offset 0.
148148
let ptr_size = size_of::<usize>();
149-
unsafe { ObjectRef::read_unchecked(&storage[0..ptr_size]) }.resurrect(fc, visited);
149+
unsafe { ObjectRef::read_branded(&storage[0..ptr_size], fc) }
150+
.resurrect(fc, visited);
150151
}
151152
LayoutManager::Field(f) => {
152153
for field in f.fields.values() {
@@ -266,7 +267,7 @@ impl FieldLayoutManager {
266267
pub fn resurrect<'gc>(
267268
&self,
268269
storage: &[u8],
269-
fc: &gc_arena::Finalization<'gc>,
270+
fc: &'gc gc_arena::Finalization<'gc>,
270271
visited: &mut HashSet<usize>,
271272
) {
272273
for field in self.fields.values() {
@@ -310,7 +311,7 @@ impl ArrayLayoutManager {
310311
pub fn resurrect<'gc>(
311312
&self,
312313
storage: &[u8],
313-
fc: &gc_arena::Finalization<'gc>,
314+
fc: &'gc gc_arena::Finalization<'gc>,
314315
visited: &mut HashSet<usize>,
315316
) {
316317
let elem_size = self.element_layout.size();

crates/dotnet-value/src/lib.rs

Lines changed: 72 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -209,10 +209,9 @@ macro_rules! wrapping_arithmetic_op {
209209

210210
impl<'gc> StackValue<'gc> {
211211
pub fn unmanaged_ptr(ptr: *mut u8) -> Self {
212-
if ptr.is_null() {
213-
Self::NativeInt(0)
214-
} else {
215-
Self::UnmanagedPtr(UnmanagedPtr(NonNull::new(ptr).unwrap()))
212+
match NonNull::new(ptr) {
213+
Some(p) => Self::UnmanagedPtr(UnmanagedPtr(p)),
214+
None => Self::NativeInt(0),
216215
}
217216
}
218217
pub fn managed_ptr(ptr: *mut u8, target_type: TypeDescription, pinned: bool) -> Self {
@@ -288,10 +287,10 @@ impl<'gc> StackValue<'gc> {
288287
match self {
289288
Self::NativeInt(i) => *i as *mut u8,
290289
Self::UnmanagedPtr(UnmanagedPtr(p)) => p.as_ptr(),
291-
Self::ManagedPtr(m) => m
292-
.pointer()
293-
.map(|p| p.as_ptr())
294-
.unwrap_or(std::ptr::null_mut()),
290+
Self::ManagedPtr(m) => match m.pointer() {
291+
Some(p) => p.as_ptr(),
292+
None => std::ptr::null_mut(),
293+
},
295294
v => panic!("expected pointer on stack, received {:?}", v),
296295
}
297296
}
@@ -413,45 +412,44 @@ impl<'gc> StackValue<'gc> {
413412
/// Note: This uses `AtomicT::from_ptr` which is supported in recent Rust versions.
414413
/// Also, it does not ensure that the appropriate locks are held for the memory being accessed.
415414
pub unsafe fn load_atomic(ptr: *const u8, t: LoadType, ordering: AtomicOrdering) -> Self {
416-
unsafe {
417-
debug_assert!(!ptr.is_null(), "Attempted to load from a null pointer");
418-
let alignment = load_type_alignment(t);
419-
debug_assert!(
420-
(ptr as usize).is_multiple_of(alignment),
421-
"Attempted to load from an unaligned pointer {:?} for type {:?}",
422-
ptr,
423-
t
424-
);
425-
426-
let size = match t {
427-
LoadType::Int8 | LoadType::UInt8 => 1,
428-
LoadType::Int16 | LoadType::UInt16 => 2,
429-
LoadType::Int32 | LoadType::UInt32 | LoadType::Float32 => 4,
430-
LoadType::Int64 | LoadType::Float64 => 8,
431-
LoadType::IntPtr => size_of::<isize>(),
432-
LoadType::Object => size_of::<usize>(),
433-
};
434-
435-
let val = StandardAtomicAccess::load_atomic(ptr, size, ordering);
436-
437-
match t {
438-
LoadType::Int8 => Self::Int32(val as i8 as i32),
439-
LoadType::UInt8 => Self::Int32(val as u8 as i32),
440-
LoadType::Int16 => Self::Int32(val as i16 as i32),
441-
LoadType::UInt16 => Self::Int32(val as u16 as i32),
442-
LoadType::Int32 | LoadType::UInt32 => Self::Int32(val as i32),
443-
LoadType::Int64 => Self::Int64(val as i64),
444-
LoadType::Float32 => Self::NativeFloat(f32::from_bits(val as u32) as f64),
445-
LoadType::Float64 => Self::NativeFloat(f64::from_bits(val)),
446-
LoadType::IntPtr => Self::NativeInt(val as isize),
447-
LoadType::Object => {
448-
let ptr = val as usize as *const ThreadSafeLock<object::ObjectInner<'gc>>;
449-
if ptr.is_null() {
450-
Self::ObjectRef(ObjectRef(None))
451-
} else {
452-
Self::ObjectRef(ObjectRef(Some(Gc::from_ptr(ptr))))
453-
}
454-
}
415+
debug_assert!(!ptr.is_null(), "Attempted to load from a null pointer");
416+
let alignment = load_type_alignment(t);
417+
debug_assert!(
418+
(ptr as usize).is_multiple_of(alignment),
419+
"Attempted to load from an unaligned pointer {:?} for type {:?}",
420+
ptr,
421+
t
422+
);
423+
424+
let size = match t {
425+
LoadType::Int8 | LoadType::UInt8 => 1,
426+
LoadType::Int16 | LoadType::UInt16 => 2,
427+
LoadType::Int32 | LoadType::UInt32 | LoadType::Float32 => 4,
428+
LoadType::Int64 | LoadType::Float64 => 8,
429+
LoadType::IntPtr => size_of::<isize>(),
430+
LoadType::Object => size_of::<usize>(),
431+
};
432+
433+
let val = unsafe { StandardAtomicAccess::load_atomic(ptr, size, ordering) };
434+
435+
match t {
436+
LoadType::Int8 => Self::Int32(val as i8 as i32),
437+
LoadType::UInt8 => Self::Int32(val as u8 as i32),
438+
LoadType::Int16 => Self::Int32(val as i16 as i32),
439+
LoadType::UInt16 => Self::Int32(val as u16 as i32),
440+
LoadType::Int32 | LoadType::UInt32 => Self::Int32(val as i32),
441+
LoadType::Int64 => Self::Int64(val as i64),
442+
LoadType::Float32 => Self::NativeFloat(f32::from_bits(val as u32) as f64),
443+
LoadType::Float64 => Self::NativeFloat(f64::from_bits(val)),
444+
LoadType::IntPtr => Self::NativeInt(val as isize),
445+
LoadType::Object => {
446+
let ptr = val as usize as *const ThreadSafeLock<object::ObjectInner<'gc>>;
447+
let obj = if ptr.is_null() {
448+
None
449+
} else {
450+
Some(unsafe { Gc::from_ptr(ptr) })
451+
};
452+
Self::ObjectRef(ObjectRef(obj))
455453
}
456454
}
457455
}
@@ -470,34 +468,34 @@ impl<'gc> StackValue<'gc> {
470468
/// Note: This uses `AtomicT::from_ptr` which is supported in recent Rust versions.
471469
/// Also, it does not ensure that the appropriate locks are held for the memory being accessed.
472470
pub unsafe fn store_atomic(self, ptr: *mut u8, t: StoreType, ordering: AtomicOrdering) {
473-
unsafe {
474-
debug_assert!(!ptr.is_null(), "Attempted to store to a null pointer");
475-
let alignment = store_type_alignment(t);
476-
debug_assert!(
477-
(ptr as usize).is_multiple_of(alignment),
478-
"Attempted to store to an unaligned pointer {:?} for type {:?}",
479-
ptr,
480-
t
481-
);
482-
483-
let (val, size) = match t {
484-
StoreType::Int8 => (self.as_i32() as u64, 1),
485-
StoreType::Int16 => (self.as_i32() as u64, 2),
486-
StoreType::Int32 => (self.as_i32() as u64, 4),
487-
StoreType::Int64 => (self.as_i64() as u64, 8),
488-
StoreType::Float32 => ((self.as_f64() as f32).to_bits() as u64, 4),
489-
StoreType::Float64 => (self.as_f64().to_bits(), 8),
490-
StoreType::IntPtr => (self.as_isize() as u64, size_of::<isize>()),
491-
StoreType::Object => {
492-
let obj = self.as_object_ref();
493-
let val = match obj.0 {
494-
Some(h) => Gc::as_ptr(h) as usize,
495-
None => 0,
496-
};
497-
(val as u64, size_of::<usize>())
498-
}
499-
};
471+
debug_assert!(!ptr.is_null(), "Attempted to store to a null pointer");
472+
let alignment = store_type_alignment(t);
473+
debug_assert!(
474+
(ptr as usize).is_multiple_of(alignment),
475+
"Attempted to store to an unaligned pointer {:?} for type {:?}",
476+
ptr,
477+
t
478+
);
479+
480+
let (val, size) = match t {
481+
StoreType::Int8 => (self.as_i32() as u64, 1),
482+
StoreType::Int16 => (self.as_i32() as u64, 2),
483+
StoreType::Int32 => (self.as_i32() as u64, 4),
484+
StoreType::Int64 => (self.as_i64() as u64, 8),
485+
StoreType::Float32 => ((self.as_f64() as f32).to_bits() as u64, 4),
486+
StoreType::Float64 => (self.as_f64().to_bits(), 8),
487+
StoreType::IntPtr => (self.as_isize() as u64, size_of::<isize>()),
488+
StoreType::Object => {
489+
let obj = self.as_object_ref();
490+
let val = match obj.0 {
491+
Some(h) => Gc::as_ptr(h) as usize,
492+
None => 0,
493+
};
494+
(val as u64, size_of::<usize>())
495+
}
496+
};
500497

498+
unsafe {
501499
StandardAtomicAccess::store_atomic(ptr, size, val, ordering);
502500
}
503501
}

crates/dotnet-value/src/object.rs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,7 @@ impl Hash for ObjectRef<'_> {
132132
impl<'gc> ObjectRef<'gc> {
133133
pub const SIZE: usize = size_of::<ObjectRef>();
134134

135-
pub fn resurrect(&self, fc: &gc_arena::Finalization<'gc>, visited: &mut HashSet<usize>) {
135+
pub fn resurrect(&self, fc: &'gc gc_arena::Finalization<'gc>, visited: &mut HashSet<usize>) {
136136
if let Some(handle) = self.0 {
137137
let ptr = Gc::as_ptr(handle) as usize;
138138
if visited.insert(ptr) {
@@ -363,7 +363,7 @@ impl<'gc> HeapStorage<'gc> {
363363
}
364364
}
365365

366-
pub fn resurrect(&self, fc: &gc_arena::Finalization<'gc>, visited: &mut HashSet<usize>) {
366+
pub fn resurrect(&self, fc: &'gc gc_arena::Finalization<'gc>, visited: &mut HashSet<usize>) {
367367
match self {
368368
HeapStorage::Vec(v) => v.resurrect(fc, visited),
369369
HeapStorage::Obj(o) => o.resurrect(fc, visited),
@@ -436,7 +436,7 @@ impl<'gc> ValueType<'gc> {
436436
}
437437
}
438438

439-
pub fn resurrect(&self, fc: &gc_arena::Finalization<'gc>, visited: &mut HashSet<usize>) {
439+
pub fn resurrect(&self, fc: &'gc gc_arena::Finalization<'gc>, visited: &mut HashSet<usize>) {
440440
match self {
441441
ValueType::Pointer(p) => p.resurrect(fc, visited),
442442
ValueType::Struct(o) => o.resurrect(fc, visited),
@@ -579,7 +579,7 @@ impl<'gc> Vector<'gc> {
579579
size_of::<Vector>() + self.storage.len() + (self.dims.len() * size_of::<usize>())
580580
}
581581

582-
pub fn resurrect(&self, fc: &gc_arena::Finalization<'gc>, visited: &mut HashSet<usize>) {
582+
pub fn resurrect(&self, fc: &'gc gc_arena::Finalization<'gc>, visited: &mut HashSet<usize>) {
583583
self.layout.resurrect(&self.storage, fc, visited);
584584
}
585585

@@ -674,7 +674,7 @@ impl<'gc> Object<'gc> {
674674
size_of::<Object>() + self.instance_storage.get().len()
675675
}
676676

677-
pub fn resurrect(&self, fc: &gc_arena::Finalization<'gc>, visited: &mut HashSet<usize>) {
677+
pub fn resurrect(&self, fc: &'gc gc_arena::Finalization<'gc>, visited: &mut HashSet<usize>) {
678678
self.instance_storage.resurrect(fc, visited);
679679
}
680680

0 commit comments

Comments (0)